Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/SOFIE_core/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@ set(source_headers
SOFIE/ROperator_Erf.hxx
SOFIE/ROperator_Swish.hxx
SOFIE/ROperator_Elu.hxx
SOFIE/ROperator_HardSigmoid.hxx
SOFIE/ROperator_HardSwish.hxx
SOFIE/ROperator_Comparision.hxx
SOFIE/ROperator_EyeLike.hxx
SOFIE/ROperator_Range.hxx
Expand Down
12 changes: 10 additions & 2 deletions src/SOFIE_core/inc/SOFIE/ROperator_BasicUnary.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

namespace SOFIE {

enum class EBasicUnaryOperator { kReciprocal, kSqrt , kNeg, kExp, kLog, kSin, kCos, kAbs };
// Elementwise unary operations dispatched through ROperator_BasicUnary /
// UnaryOpTraits. Enumerator order is part of the on-disk/ABI contract:
// append new operators at the end, never reorder.
enum class EBasicUnaryOperator {
   kReciprocal, // y = 1 / x
   kSqrt,       // y = sqrt(x)
   kNeg,        // y = -x
   kExp,        // y = exp(x)
   kLog,        // y = log(x)
   kSin,        // y = sin(x)
   kCos,        // y = cos(x)
   kAbs,        // y = |x|
   kSoftplus    // y = log(1 + exp(x))
};

template <typename T, EBasicUnaryOperator Op>
struct UnaryOpTraits {
Expand Down Expand Up @@ -62,6 +62,14 @@ struct UnaryOpTraits<T, EBasicUnaryOperator::kAbs> {
static std::string Op(const std::string &X) { return "std::abs(" + X + ")"; }
};

// Code-generation traits for Softplus: y = log(1 + exp(x)).
// For large x, log1p(exp(x)) == x to within float precision (the error
// log1p(exp(-x)) is ~2e-9 at x = 20, far below one float ulp of 20), so the
// emitted code returns x directly above the threshold; this also prevents
// overflow in exp(x).
template <typename T>
struct UnaryOpTraits<T, EBasicUnaryOperator::kSoftplus> {
   static std::string Name() { return "Softplus"; }
   // Returns the C++ expression computing softplus of the expression X.
   // 20.0f is the numerically-safe cut-over described above (the previous
   // hex literal 0x1.4000000000000p+4f is exactly this value).
   static std::string Op(const std::string &X) {
      return "((" + X + " >= 20.0f) ? " + X + " : std::log1p(std::exp(" + X + ")))";
   }
};

template <typename T, EBasicUnaryOperator Op>
class ROperator_BasicUnary final : public ROperator {
private:
Expand Down Expand Up @@ -108,7 +116,7 @@ public:
}

std::vector<std::string> GetStdLibs() override {
if (Op == EBasicUnaryOperator::kSqrt || Op == EBasicUnaryOperator::kExp || Op == EBasicUnaryOperator::kLog) {
if (Op == EBasicUnaryOperator::kSqrt || Op == EBasicUnaryOperator::kExp || Op == EBasicUnaryOperator::kLog || Op == EBasicUnaryOperator::kSoftplus) {
return { std::string("cmath") };
} else {
return {};
Expand Down
70 changes: 70 additions & 0 deletions src/SOFIE_core/inc/SOFIE/ROperator_HardSigmoid.hxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
#ifndef SOFIE_ROPERATOR_HARDSIGMOID
#define SOFIE_ROPERATOR_HARDSIGMOID

#include <SOFIE/SOFIE_common.hxx>
#include <SOFIE/ROperator.hxx>
#include <SOFIE/RModel.hxx>

#include <iomanip>
#include <limits>
#include <sstream>

namespace SOFIE {

/// ONNX HardSigmoid operator: y = max(0, min(1, alpha * x + beta)),
/// applied elementwise to the input tensor.
template <typename T>
class ROperator_HardSigmoid final : public ROperator
{

private:

   std::string fNX;            // name of the input tensor
   std::string fNY;            // name of the output tensor
   std::vector<size_t> fShape; // shape of the input (and output) tensor
   float fAlpha;               // slope of the linear segment (ONNX default 0.2)
   float fBeta;                // offset of the linear segment (ONNX default 0.5)

public:
   ROperator_HardSigmoid(){}
   ROperator_HardSigmoid(std::string nameX, std::string nameY, float alpha, float beta):
      fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY)), fAlpha(alpha), fBeta(beta){
         fInputTensorNames = { fNX };
         fOutputTensorNames = { fNY };
   }

   // Elementwise op: output type equals input type.
   std::vector<ETensorType> TypeInference(std::vector<ETensorType> input) override {
      return input;
   }

   // Elementwise op: output shape equals input shape.
   std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>> input) override {
      return input;
   }

   void Initialize(RModel& model) override {
      if (!model.CheckIfTensorAlreadyExist(fNX)){
         throw std::runtime_error("SOFIE HardSigmoid Op Input Tensor " + fNX + " is not found in model");
      }
      fShape = model.GetTensorShape(fNX);
      model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShape);
   }

   std::string Generate(std::string OpName) override {
      OpName = "op_" + OpName;
      if (fShape.empty()){
         throw std::runtime_error("SOFIE HardSigmoid operator called to Generate without being initialized first");
      }
      std::stringstream out;
      size_t length = ConvertShapeToLength(fShape);

      // Emit alpha/beta with full float precision so the constants survive
      // the round trip through the generated source unchanged (the default
      // ostream precision of 6 significant digits would truncate them).
      out << std::setprecision(std::numeric_limits<float>::max_digits10);

      // HardSigmoid: y = max(0, min(1, alpha * x + beta))
      out << "\n//------ HardSigmoid\n";
      out << SP << "for (size_t id = 0; id < " << length << " ; id++){\n";
      out << SP << SP << "tensor_" << fNY << "[id] = std::fmax(0.0f, std::fmin(1.0f, "
          << fAlpha << "f * tensor_" << fNX << "[id] + " << fBeta << "f));\n";
      out << SP << "}\n";
      return out.str();
   }

   // The generated code calls std::fmax/std::fmin from <cmath>.
   std::vector<std::string> GetStdLibs() override { return { std::string("cmath") };}
};

} // namespace SOFIE

#endif
70 changes: 70 additions & 0 deletions src/SOFIE_core/inc/SOFIE/ROperator_HardSwish.hxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
#ifndef SOFIE_ROPERATOR_HARDSWISH
#define SOFIE_ROPERATOR_HARDSWISH

#include <SOFIE/SOFIE_common.hxx>
#include <SOFIE/ROperator.hxx>
#include <SOFIE/RModel.hxx>

#include <sstream>

namespace SOFIE {

/// ONNX HardSwish operator: y = x * max(0, min(1, x/6 + 1/2)),
/// applied elementwise to the input tensor.
template <typename T>
class ROperator_HardSwish final : public ROperator
{

private:

   std::string fNX;            // name of the input tensor
   std::string fNY;            // name of the output tensor
   std::vector<size_t> fShape; // shape of the input (and output) tensor

public:
   ROperator_HardSwish(){}
   ROperator_HardSwish(std::string nameX, std::string nameY):
      fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY)){
         fInputTensorNames = { fNX };
         fOutputTensorNames = { fNY };
   }

   // Elementwise op: output type equals input type.
   std::vector<ETensorType> TypeInference(std::vector<ETensorType> input) override {
      return input;
   }

   // Elementwise op: output shape equals input shape.
   std::vector<std::vector<size_t>> ShapeInference(std::vector<std::vector<size_t>> input) override {
      return input;
   }

   void Initialize(RModel& model) override {
      if (!model.CheckIfTensorAlreadyExist(fNX)){
         throw std::runtime_error("SOFIE HardSwish Op Input Tensor " + fNX + " is not found in model");
      }
      fShape = model.GetTensorShape(fNX);
      model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShape);
   }

   std::string Generate(std::string OpName) override {
      OpName = "op_" + OpName;
      if (fShape.empty()){
         throw std::runtime_error("SOFIE HardSwish operator called to Generate without being initialized first");
      }
      std::stringstream out;
      size_t length = ConvertShapeToLength(fShape);

      // HardSwish: y = x * max(0, min(1, x/6 + 0.5))
      // The hard-sigmoid factor is computed in a named local for debuggability
      // of the generated code; dividing by 6.0f is correctly rounded (and at
      // least as accurate as multiplying by a rounded 1/6 constant).
      out << "\n//------ HardSwish\n";
      out << SP << "for (size_t id = 0; id < " << length << " ; id++){\n";
      out << SP << SP << "float h = tensor_" << fNX << "[id] / 6.0f + 0.5f;\n";
      out << SP << SP << "tensor_" << fNY << "[id] = tensor_" << fNX
          << "[id] * std::fmax(0.0f, std::fmin(1.0f, h));\n";
      out << SP << "}\n";
      return out.str();
   }

   // The generated code calls std::fmax/std::fmin from <cmath>.
   std::vector<std::string> GetStdLibs() override { return { std::string("cmath") };}
};

} // namespace SOFIE

#endif
2 changes: 2 additions & 0 deletions src/SOFIE_parsers/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,8 @@ set(sources_cxx
src/ParseExpand.cxx
src/ParseGather.cxx
src/ParseElu.cxx
src/ParseHardSigmoid.cxx
src/ParseHardSwish.cxx
src/ParseFuseConvAdd.cxx
src/ParseFuseConvTransposeAdd.cxx
src/ParseFuseGemmRelu.cxx
Expand Down
5 changes: 5 additions & 0 deletions src/SOFIE_parsers/src/ParseBasicUnary.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -79,5 +79,10 @@ ParserFuncSignature ParseAbs = [](RModelParser_ONNX &parser, const onnx::NodePro
return ParseBasicUnary<EBasicUnaryOperator::kAbs>(parser, nodeproto);
};

// Parse Softplus: y = log(1 + exp(x)). Dispatched through the shared
// basic-unary parser; the actual code generation lives in
// UnaryOpTraits<T, EBasicUnaryOperator::kSoftplus>.
ParserFuncSignature ParseSoftplus = [](RModelParser_ONNX &parser, const onnx::NodeProto &nodeproto) {
return ParseBasicUnary<EBasicUnaryOperator::kSoftplus>(parser, nodeproto);
};

} // namespace SOFIE

47 changes: 47 additions & 0 deletions src/SOFIE_parsers/src/ParseHardSigmoid.cxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#include "SOFIE/RModelParser_ONNX.hxx"
#include "SOFIE/ROperator_HardSigmoid.hxx"
#include "onnx_proto3.pb.h"

namespace SOFIE {

// Parse an ONNX HardSigmoid node into a ROperator_HardSigmoid.
// Reads the optional `alpha` and `beta` attributes; registers the output
// tensor type as equal to the input type.
ParserFuncSignature ParseHardSigmoid = [](RModelParser_ONNX &parser, const onnx::NodeProto &nodeproto) {
   ETensorType input_type;

   // ONNX spec defaults: alpha=0.2, beta=0.5
   float alpha = 0.2f;
   float beta = 0.5f;

   // protobuf's attribute_size() returns int, so iterate with a plain int
   // (the previous `int_t` is not a standard type and did not compile).
   for (int i = 0; i < nodeproto.attribute_size(); i++) {
      const std::string &attribute_name = nodeproto.attribute(i).name();
      if (attribute_name == "alpha")
         alpha = nodeproto.attribute(i).f();
      else if (attribute_name == "beta")
         beta = nodeproto.attribute(i).f();
   }

   auto input_name = nodeproto.input(0);
   if (parser.IsRegisteredTensorType(input_name)) {
      input_type = parser.GetTensorType(input_name);
   } else {
      throw std::runtime_error("TMVA::SOFIE ONNX Parser HardSigmoid op has input tensor " + input_name +
                               " but its type is not yet registered");
   }

   std::unique_ptr<ROperator> op;
   std::string output_name = nodeproto.output(0);

   switch (input_type) {
   case ETensorType::FLOAT: op.reset(new ROperator_HardSigmoid<float>(input_name, output_name, alpha, beta)); break;
   default:
      throw std::runtime_error("TMVA::SOFIE - Unsupported - Operator HardSigmoid does not yet support input type " +
                               std::to_string(static_cast<int>(input_type)));
   }

   // Propagate the input type to the output tensor if not yet known.
   if (!parser.IsRegisteredTensorType(output_name)) {
      parser.RegisterTensorType(output_name, input_type);
   }

   return op;
};

} // namespace SOFIE
35 changes: 35 additions & 0 deletions src/SOFIE_parsers/src/ParseHardSwish.cxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#include "SOFIE/RModelParser_ONNX.hxx"
#include "SOFIE/ROperator_HardSwish.hxx"
#include "onnx_proto3.pb.h"

namespace SOFIE {

// Parse an ONNX HardSwish node into a ROperator_HardSwish.
// HardSwish has no attributes; the output tensor type is registered as
// equal to the input type.
ParserFuncSignature ParseHardSwish = [](RModelParser_ONNX &parser, const onnx::NodeProto &nodeproto) {
   const std::string input_name = nodeproto.input(0);
   // Guard clause: the input type must already be known to the parser.
   if (!parser.IsRegisteredTensorType(input_name)) {
      throw std::runtime_error("TMVA::SOFIE ONNX Parser HardSwish op has input tensor " + input_name +
                               " but its type is not yet registered");
   }
   const ETensorType input_type = parser.GetTensorType(input_name);
   const std::string output_name = nodeproto.output(0);

   std::unique_ptr<ROperator> op;
   if (input_type == ETensorType::FLOAT) {
      op = std::make_unique<ROperator_HardSwish<float>>(input_name, output_name);
   } else {
      throw std::runtime_error("TMVA::SOFIE - Unsupported - Operator HardSwish does not yet support input type " +
                               std::to_string(static_cast<int>(input_type)));
   }

   // Propagate the input type to the output tensor if not yet known.
   if (!parser.IsRegisteredTensorType(output_name)) {
      parser.RegisterTensorType(output_name, input_type);
   }

   return op;
};

} // namespace SOFIE
6 changes: 6 additions & 0 deletions src/SOFIE_parsers/src/RModelParser_ONNX.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,9 @@ extern ParserFuncSignature ParseLayerNormalization;
extern ParserFuncSignature ParseGather;
extern ParserFuncSignature ParseErf;
extern ParserFuncSignature ParseElu;
extern ParserFuncSignature ParseHardSigmoid;
extern ParserFuncSignature ParseHardSwish;
extern ParserFuncSignature ParseSoftplus;
extern ParserFuncSignature ParseEyeLike;
extern ParserFuncSignature ParseRange;
extern ParserFuncSignature ParseTopK;
Expand Down Expand Up @@ -219,6 +222,9 @@ RModelParser_ONNX::RModelParser_ONNX() noexcept : fOperatorsMapImpl(std::make_un
RegisterOperator("Gather", ParseGather);
RegisterOperator("Erf", ParseErf);
RegisterOperator("Elu", ParseElu);
RegisterOperator("HardSigmoid", ParseHardSigmoid);
RegisterOperator("HardSwish", ParseHardSwish);
RegisterOperator("Softplus", ParseSoftplus);
RegisterOperator("EyeLike", ParseEyeLike);
RegisterOperator("Range", ParseRange);
RegisterOperator("TopK", ParseTopK);
Expand Down