* @OpCode -- operator ID from the OpCodes enum.
* @CreatorFunc -- method of the RandomModelBuilder class for creating this operator.
* DEF_OPERATOR(Name, OpCode, CreatorFunc)
+ *
+ * Commented-out entries in this file are operations which are recommended for implementation.
+ * @todo: add support for the not-yet-supported operators among the following set.
+ *
*/
#ifndef DEF_OPERATOR
#endif
/*
DEF_OPERATOR("ADD", Op_ADD, tflite::BuiltinOperator::BuiltinOperator_ADD, createLayerADD )
-DEF_OPERATOR("CONCATENATION", Op_CONCATENATION, tflite::BuiltinOperator::BuiltinOperator_CONCATENATION, createLayerCONCATENATION )
*/
-DEF_OPERATOR("CONV_2D", OpConv2d, tflite::BuiltinOperator::BuiltinOperator_CONV_2D, createLayerCONV_2D )
+DEF_OPERATOR("CONV_2D", OpConv2d, tflite::BuiltinOperator::BuiltinOperator_CONV_2D, createLayerCONV_2D )
+DEF_OPERATOR("CONCATENATION", OpConcatenation, tflite::BuiltinOperator::BuiltinOperator_CONCATENATION, createLayerCONCATENATION )
+DEF_OPERATOR("DEPTHWISE_CONV_2D", OpDepthwiseConv2d, tflite::BuiltinOperator::BuiltinOperator_DEPTHWISE_CONV_2D, createLayerDEPTHWISE_CONV_2D )
+DEF_OPERATOR("MAX_POOL_2D", OpMaxPool2d, tflite::BuiltinOperator::BuiltinOperator_MAX_POOL_2D, createLayerX_POOL_2D )
+DEF_OPERATOR("AVERAGE_POOL_2D", OpAveragePool2d, tflite::BuiltinOperator::BuiltinOperator_AVERAGE_POOL_2D, createLayerX_POOL_2D )
+DEF_OPERATOR("SOFTMAX", OpSoftmax, tflite::BuiltinOperator::BuiltinOperator_SOFTMAX, createLayerSOFTMAX )
+DEF_OPERATOR("FULLY_CONNECTED", OpFullyConnected, tflite::BuiltinOperator::BuiltinOperator_FULLY_CONNECTED, createLayerFULLY_CONNECTED )
/*
-DEF_OPERATOR("DEPTHWISE_CONV_2D", Op_DEPTHWISE_CONV_2D, tflite::BuiltinOperator::BuiltinOperator_DEPTHWISE_CONV_2D, createLayerDEPTHWISE_CONV_2D )
-DEF_OPERATOR("MAX_POOL_2D", Op_MAX_POOL_2D, tflite::BuiltinOperator::BuiltinOperator_MAX_POOL_2D, createLayerX_POOL_2D )
-DEF_OPERATOR("AVERAGE_POOL_2D", Op_AVERAGE_POOL_2D, tflite::BuiltinOperator::BuiltinOperator_AVERAGE_POOL_2D, createLayerX_POOL_2D )
DEF_OPERATOR("DEQUANTIZE", Op_DEQUANTIZE, tflite::BuiltinOperator::BuiltinOperator_DEQUANTIZE, createLayerDEQUANTIZE )
DEF_OPERATOR("EMBEDDING_LOOKUP", Op_EMBEDDING_LOOKUP, tflite::BuiltinOperator::BuiltinOperator_EMBEDDING_LOOKUP, createLayerEMBEDDING_LOOKUP )
DEF_OPERATOR("FLOOR", Op_FLOOR, tflite::BuiltinOperator::BuiltinOperator_FLOOR, createLayerFLOOR )
-DEF_OPERATOR("FULLY_CONNECTED", Op_FULLY_CONNECTED, tflite::BuiltinOperator::BuiltinOperator_FULLY_CONNECTED, createLayerFULLY_CONNECTED )
DEF_OPERATOR("HASHTABLE_LOOKUP", Op_HASHTABLE_LOOKUP, tflite::BuiltinOperator::BuiltinOperator_HASHTABLE_LOOKUP, createLayerHASHTABLE_LOOKUP )
DEF_OPERATOR("L2_NORMALIZATION", Op_L2_NORMALIZATION, tflite::BuiltinOperator::BuiltinOperator_L2_NORMALIZATION, createLayerL2_NORMALIZATION )
DEF_OPERATOR("L2_POOL_2D", Op_L2_POOL_2D, tflite::BuiltinOperator::BuiltinOperator_L2_POOL_2D, createLayerL2_POOL_2D )
DEF_OPERATOR("RESHAPE", Op_RESHAPE, tflite::BuiltinOperator::BuiltinOperator_RESHAPE, createLayerRESHAPE )
DEF_OPERATOR("RESIZE_BILINEAR", Op_RESIZE_BILINEAR, tflite::BuiltinOperator::BuiltinOperator_RESIZE_BILINEAR, createLayerRESIZE_BILINEAR )
DEF_OPERATOR("RNN", Op_RNN, tflite::BuiltinOperator::BuiltinOperator_RNN, createLayerRNN )
-DEF_OPERATOR("SOFTMAX", Op_SOFTMAX, tflite::BuiltinOperator::BuiltinOperator_SOFTMAX, createLayerSOFTMAX )
DEF_OPERATOR("SPACE_TO_DEPTH", Op_SPACE_TO_DEPTH, tflite::BuiltinOperator::BuiltinOperator_SPACE_TO_DEPTH, createLayerSPACE_TO_DEPTH )
DEF_OPERATOR("SVDF", Op_SVDF, tflite::BuiltinOperator::BuiltinOperator_SVDF, createLayerSVDF )
DEF_OPERATOR("TANH", Op_TANH, tflite::BuiltinOperator::BuiltinOperator_TANH, createLayerTANH )
* @details Be careful: opCreators's initializer list must preserve the order
* of the OpCodes enum.
*/
- RandomModelBuilder() : gen(rd()), floatRand(std::numeric_limits<float>::min(),
- std::numeric_limits<float>::max()),
- intRand(static_cast<int32_t>(OpCodes::opFirst),
- static_cast<int32_t>(OpCodes::opLast)),
- operatorCounts{0} {
- opCreators[static_cast<int>(OpCodes::opConv2d)] =
+ RandomModelBuilder() : _operatorCounts{0}, _gen(_rd()),
+ _floatRand(std::numeric_limits<float>::min(),
+ std::numeric_limits<float>::max()),
+ _intRand(static_cast<int32_t>(OpCodes::opFirst),
+ static_cast<int32_t>(OpCodes::opLast)) {
+ _opCreators[static_cast<int>(OpCodes::opConv2d)] =
[this](treebuilder::Tree* t, treebuilder::Operation* op) {
createLayerCONV_2D(t, op);
};
+ _opCreators[static_cast<int>(OpCodes::opConcatenation)] =
+ [this](treebuilder::Tree* t, treebuilder::Operation* op) {
+ createLayerCONCATENATION(t, op);
+ };
+ _opCreators[static_cast<int>(OpCodes::opDepthwiseConv2d)] =
+ [this](treebuilder::Tree* t, treebuilder::Operation* op) {
+ createLayerDEPTHWISE_CONV_2D(t, op);
+ };
+ _opCreators[static_cast<int>(OpCodes::opOpMaxPool2d)] =
+ [this](treebuilder::Tree* t, treebuilder::Operation* op) {
+ createLayerX_POOL_2D(t, op, OpCodes::opOpMaxPool2d);
+ };
+ _opCreators[static_cast<int>(OpCodes::opAveragePool2d)] =
+ [this](treebuilder::Tree* t, treebuilder::Operation* op) {
+ createLayerX_POOL_2D(t, op, OpCodes::opAveragePool2d);
+ };
+ _opCreators[static_cast<int>(OpCodes::opSoftmax)] =
+ [this](treebuilder::Tree* t, treebuilder::Operation* op) {
+ createLayerSOFTMAX(t, op);
+ };
+ _opCreators[static_cast<int>(OpCodes::opFullyConnected)] =
+ [this](treebuilder::Tree* t, treebuilder::Operation* op) {
+ createLayerFULLY_CONNECTED(t, op);
+ };
};
virtual ~RandomModelBuilder() = default;
virtual std::unique_ptr<ModelSaver> createModelSaver() = 0;
protected:
- std::random_device rd;
- std::mt19937 gen;
- std::uniform_real_distribution<float> floatRand;
- std::uniform_int_distribution<int> intRand;
-
- /**
- * @brief operatorCounts this array contains amount of used operators in generated model.
- * @details For example: operatorCounts[OpCodes::opConv2d] -- amount of used 2D convolution operators.
- */
- int operatorCounts[static_cast<int32_t>(OpCodes::opCount)];
-
- /**
- * @brief opCreators this array contains a lambda with call of method
- * for building specified operator.
- * @details This array is used for convenient creation random operators,
- * like follow: opCreators[OpCodes::opCount]
- * For example: opCreators[OpCodes::opConv2d](0) -- will lead to call createLayerCONV_2D method.
- */
- std::function<void(treebuilder::Tree*, treebuilder::Operation*)>
- opCreators[static_cast<int32_t>(OpCodes::opCount)];
-
/**
* @brief createInput adds an input tensor to the model.
*/
* @param input_tensor_id is id of input tensor.
*/
virtual void createLayerCONV_2D(treebuilder::Tree*, treebuilder::Operation*) = 0;
+ virtual void createLayerCONCATENATION(treebuilder::Tree* t, treebuilder::Operation* op) = 0;
+ virtual void createLayerDEPTHWISE_CONV_2D(treebuilder::Tree* t, treebuilder::Operation* op) = 0;
+ virtual void createLayerX_POOL_2D(treebuilder::Tree* t, treebuilder::Operation* op,
+ OpCodes opcode) = 0;
+ virtual void createLayerSOFTMAX(treebuilder::Tree* t, treebuilder::Operation* op) = 0;
+ virtual void createLayerFULLY_CONNECTED(treebuilder::Tree* t, treebuilder::Operation* op) = 0;
+
+ /**
+ * @brief opCreators this array contains a lambda with call of method
+ * for building specified operator.
+ * @details This array is used for convenient creation random operators,
+ * like follow: opCreators[OpCodes::opCount]
+ * For example: opCreators[OpCodes::opConv2d](0) -- will lead to call createLayerCONV_2D method.
+ */
+ std::function<void(treebuilder::Tree*,
+ treebuilder::Operation*)> _opCreators[static_cast<int32_t>(OpCodes::opCount)];
+ /**
+ * @brief operatorCounts this array contains amount of used operators in generated model.
+ * @details For example: operatorCounts[Op_CONV_2D] -- amount of used 2D convolution operators.
+ */
+ int _operatorCounts[static_cast<int32_t>(OpCodes::opCount)];
+
+ std::random_device _rd;
+ std::mt19937 _gen;
+ std::uniform_real_distribution<float> _floatRand;
+ std::uniform_int_distribution<int> _intRand;
};
} // namespace modelgen
std::cout << "Add operator [" << opNames[static_cast<int32_t>(op->opcode)] << "] on the level [ "
<< op->levelOwner << " ]" << std::endl;
- opCreators[static_cast<int32_t>(op->opcode)](t, op);
+ _opCreators[static_cast<int32_t>(op->opcode)](t, op);
_model->subgraphs[0]->outputs[0] = (*_model->subgraphs[0]->operators.rbegin())->outputs[0];
}
void TFLiteRandomModelBuilder::createLayerCONV_2D(treebuilder::Tree* t,
treebuilder::Operation* op) {
std::string output_name(opNames[static_cast<int32_t>(OpCodes::opConv2d)]);
- output_name += "_" + std::to_string(operatorCounts[static_cast<int32_t>(OpCodes::opConv2d)]);
+ output_name += "_" + std::to_string(_operatorCounts[static_cast<int32_t>(OpCodes::opConv2d)]);
auto operator_ptr = createEmptyOperator(op);
auto out_tensor_ptr = createEmptyTensor(op->outputShape, output_name.c_str());
_model->subgraphs[0]->operators.push_back(std::move(operator_ptr));
}
+void TFLiteRandomModelBuilder::createLayerCONCATENATION(treebuilder::Tree* t,
+ treebuilder::Operation* op) {
+ std::string output_name(opNames[static_cast<int32_t>(OpCodes::opConcatenation)]);
+ output_name += "_" +
+ std::to_string(_operatorCounts[static_cast<int32_t>(OpCodes::opConcatenation)]);
+ auto operator_ptr = createEmptyOperator(op);
+
+ auto out_tensor_ptr = createEmptyTensor(op->outputShape, output_name.c_str());
+
+ std::cout << "Concatination inputs [ ";
+ for (auto it : op->inputs) {
+ std::cout << it << "/";
+ auto input_tensor_id = op->levelOwner == 0 ? _operandTree2tensor[0] :
+ _operandTree2tensor[t->inputCnt + it];
+ std::cout << input_tensor_id << " ";
+ operator_ptr->inputs.push_back(input_tensor_id);
+ }
+ std::cout << "]" << std::endl;
+
+ auto output_tensor_id = static_cast<int32_t>(_model->subgraphs[0]->tensors.size());
+ _operandTree2tensor.push_back(output_tensor_id);
+ operator_ptr->outputs.push_back(output_tensor_id);
+ _model->subgraphs[0]->tensors.push_back(std::move(out_tensor_ptr));
+
+ operator_ptr->builtin_options.Set(tflite::ConcatenationOptionsT());
+ auto concat_opt = operator_ptr->builtin_options.AsConcatenationOptions();
+ concat_opt->fused_activation_function =
+ tflite::ActivationFunctionType::ActivationFunctionType_RELU6;
+ concat_opt->axis = 0;
+
+ for (auto it : op->inputShape) {
+ if (it == -1)
+ break ;
+ concat_opt->axis++;
+ }
+
+ _model->subgraphs[0]->operators.push_back(std::move(operator_ptr));
+}
+
+void TFLiteRandomModelBuilder::createLayerDEPTHWISE_CONV_2D(treebuilder::Tree* t,
+ treebuilder::Operation* op) {
+ std::string output_name(opNames[static_cast<int32_t>(OpCodes::opDepthwiseConv2d)]);
+ output_name += "_" +
+ std::to_string(_operatorCounts[static_cast<int32_t>(OpCodes::opDepthwiseConv2d)]);
+ auto operator_ptr = createEmptyOperator(op);
+
+ auto out_tensor_ptr = createEmptyTensor(op->outputShape, output_name.c_str());
+ auto kernel_ptr = createTensorWthBuffer(op->kernelShape, "Kernel");
+ auto bias_ptr = createTensorWthBuffer({op->outputShape[3]}, "bias");
+
+ auto input_tensor_id = op->levelOwner == 0 ? _operandTree2tensor[0] :
+ _operandTree2tensor[t->inputCnt + op->inputs[0]];
+
+ operator_ptr->inputs.push_back(input_tensor_id);
+ operator_ptr->inputs.push_back(static_cast<int32_t>(_model->subgraphs[0]->tensors.size()));
+ _model->subgraphs[0]->tensors.push_back(std::move(kernel_ptr));
+ operator_ptr->inputs.push_back(static_cast<int32_t>(_model->subgraphs[0]->tensors.size()));
+ _model->subgraphs[0]->tensors.push_back(std::move(bias_ptr));
+
+ auto output_tensor_id = static_cast<int32_t>(_model->subgraphs[0]->tensors.size());
+ _operandTree2tensor.push_back(output_tensor_id);
+ operator_ptr->outputs.push_back(output_tensor_id);
+ _model->subgraphs[0]->tensors.push_back(std::move(out_tensor_ptr));
+
+ operator_ptr->builtin_options.Set(tflite::DepthwiseConv2DOptionsT());
+ auto depthwise_conv2d_opt = operator_ptr->builtin_options.AsDepthwiseConv2DOptions();
+ depthwise_conv2d_opt->stride_w = depthwise_conv2d_opt->stride_h = 1;
+ depthwise_conv2d_opt->depth_multiplier = 1;
+ depthwise_conv2d_opt->fused_activation_function =
+ tflite::ActivationFunctionType::ActivationFunctionType_RELU6;
+
+ _model->subgraphs[0]->operators.push_back(std::move(operator_ptr));
+}
+
+void TFLiteRandomModelBuilder::createLayerX_POOL_2D(treebuilder::Tree* t,
+ treebuilder::Operation* op, OpCodes opcode) {
+ std::string output_name(opNames[static_cast<int32_t>(opcode)]);
+ output_name += "_" +
+ std::to_string(_operatorCounts[static_cast<int32_t>(opcode)]);
+ auto operator_ptr = createEmptyOperator(op);
+
+ auto out_tensor_ptr = createEmptyTensor(op->outputShape, output_name.c_str());
+
+ auto input_tensor_id = op->levelOwner == 0 ? _operandTree2tensor[0] :
+ _operandTree2tensor[t->inputCnt + op->inputs[0]];
+ operator_ptr->inputs.push_back(input_tensor_id);
+
+ auto output_tensor_id = static_cast<int32_t>(_model->subgraphs[0]->tensors.size());
+ _operandTree2tensor.push_back(output_tensor_id);
+ operator_ptr->outputs.push_back(output_tensor_id);
+ _model->subgraphs[0]->tensors.push_back(std::move(out_tensor_ptr));
+
+ /**
+ * @todo generate random filter width/height.
+ */
+ operator_ptr->builtin_options.Set(tflite::Pool2DOptionsT());
+ auto pool2d_opt = operator_ptr->builtin_options.AsPool2DOptions();
+ pool2d_opt->stride_w = pool2d_opt->stride_h = 1;
+ pool2d_opt->filter_width = pool2d_opt->filter_height = 3;
+
+ _model->subgraphs[0]->operators.push_back(std::move(operator_ptr));
+}
+
+void TFLiteRandomModelBuilder::createLayerSOFTMAX(treebuilder::Tree* t,
+ treebuilder::Operation* op) {
+ std::string output_name(opNames[static_cast<int32_t>(OpCodes::opSoftmax)]);
+ output_name += "_" +
+ std::to_string(_operatorCounts[static_cast<int32_t>(OpCodes::opSoftmax)]);
+ auto operator_ptr = createEmptyOperator(op);
+
+ auto out_tensor_ptr = createEmptyTensor(op->outputShape, output_name.c_str());
+
+ auto input_tensor_id = op->levelOwner == 0 ? _operandTree2tensor[0] :
+ _operandTree2tensor[t->inputCnt + op->inputs[0]];
+ operator_ptr->inputs.push_back(input_tensor_id);
+
+ auto output_tensor_id = static_cast<int32_t>(_model->subgraphs[0]->tensors.size());
+ _operandTree2tensor.push_back(output_tensor_id);
+ operator_ptr->outputs.push_back(output_tensor_id);
+ _model->subgraphs[0]->tensors.push_back(std::move(out_tensor_ptr));
+
+ operator_ptr->builtin_options.Set(tflite::SoftmaxOptionsT());
+ auto softmax_opt = operator_ptr->builtin_options.AsSoftmaxOptions();
+ softmax_opt->beta = _floatRand(_gen);
+
+ _model->subgraphs[0]->operators.push_back(std::move(operator_ptr));
+}
+
+void TFLiteRandomModelBuilder::createLayerFULLY_CONNECTED(treebuilder::Tree* t,
+ treebuilder::Operation* op) {
+ std::string output_name(opNames[static_cast<int32_t>(OpCodes::opFullyConnected)]);
+ output_name += "_" +
+ std::to_string(_operatorCounts[static_cast<int32_t>(OpCodes::opFullyConnected)]);
+ auto operator_ptr = createEmptyOperator(op);
+
+ auto out_tensor_ptr = createEmptyTensor(op->outputShape, output_name.c_str());
+ auto kernel_ptr = createTensorWthBuffer(op->kernelShape, "Kernel");
+ auto bias_ptr = createTensorWthBuffer({op->outputShape[3]}, "bias");
+
+ auto input_tensor_id = op->levelOwner == 0 ? _operandTree2tensor[0] :
+ _operandTree2tensor[t->inputCnt + op->inputs[0]];
+
+ operator_ptr->inputs.push_back(input_tensor_id);
+ operator_ptr->inputs.push_back(static_cast<int32_t>(_model->subgraphs[0]->tensors.size()));
+ _model->subgraphs[0]->tensors.push_back(std::move(kernel_ptr));
+ operator_ptr->inputs.push_back(static_cast<int32_t>(_model->subgraphs[0]->tensors.size()));
+ _model->subgraphs[0]->tensors.push_back(std::move(bias_ptr));
+
+ auto output_tensor_id = static_cast<int32_t>(_model->subgraphs[0]->tensors.size());
+ _operandTree2tensor.push_back(output_tensor_id);
+ operator_ptr->outputs.push_back(output_tensor_id);
+ _model->subgraphs[0]->tensors.push_back(std::move(out_tensor_ptr));
+
+ operator_ptr->builtin_options.Set(tflite::FullyConnectedOptionsT());
+ auto fullcon_opt = operator_ptr->builtin_options.AsFullyConnectedOptions();
+ fullcon_opt->fused_activation_function =
+ tflite::ActivationFunctionType::ActivationFunctionType_RELU6;
+
+ _model->subgraphs[0]->operators.push_back(std::move(operator_ptr));
+}
+
std::unique_ptr<TensorT>
TFLiteRandomModelBuilder::createEmptyTensor(const std::vector<int32_t>& shape,
const char* name) {
_model->buffers[tensor_ptr->buffer]->data.resize(buffer_size);
for (size_t i = 0; i < buffer_size; i += sizeof(float)) {
- float val = floatRand(gen);
+ float val = _floatRand(_gen);
memcpy(_model->buffers[tensor_ptr->buffer]->data.data() + i, &val, sizeof(float));
}
return tensor_ptr;
_mapOperatorCode[static_cast<int32_t>(op->opcode)] = opcode_id;
}
operator_ptr->opcode_index = static_cast<uint32_t>(opcode_id);
- operatorCounts[static_cast<int32_t>(op->opcode)]++;
+ _operatorCounts[static_cast<int32_t>(op->opcode)]++;
return operator_ptr;
}
* Operations:
*/
void createLayerCONV_2D(treebuilder::Tree* t, treebuilder::Operation* op) override;
+ void createLayerCONCATENATION(treebuilder::Tree* t, treebuilder::Operation* op) override;
+ void createLayerDEPTHWISE_CONV_2D(treebuilder::Tree* t, treebuilder::Operation* op) override;
+ void createLayerX_POOL_2D(treebuilder::Tree* t, treebuilder::Operation* op,
+ OpCodes opcode) override;
+ void createLayerSOFTMAX(treebuilder::Tree* t, treebuilder::Operation* op) override;
+ void createLayerFULLY_CONNECTED(treebuilder::Tree* t, treebuilder::Operation* op) override;
private:
/**
* @brief createEmptyTensor does create tensor without buffer
std::unique_ptr<OperatorT> createEmptyOperator(treebuilder::Operation* op);
std::unique_ptr<ModelT> _model;
-
/**
* @details This vector contains a index of tensor (in subgraph tflite vector)
* for output operand of tree's node `i`.
/**
* @brief mapOperatorCode contains indexes to operator_codes array in ModelT.
*/
- long _mapOperatorCode[static_cast<int32_t>(OpCodes::opCount)];
+ int32_t _mapOperatorCode[static_cast<int32_t>(OpCodes::opCount)];
};
} // namespace modelgen
*/
#include <assert.h>
+#include <algorithm>
#include "Tree.h"
static constexpr int widthMax = 10;
static constexpr int shapeMin = 1;
static constexpr int shapeMax = 64;
+static constexpr int concatCntInputsMin = 4;
+static constexpr int concatCntInputsMax = 8;
+static constexpr int depthwiseConv2dMultiply = 4;
+static constexpr int fullyConnectedMaxWeight = 8;
+static constexpr int fullyConnectedKernelDim = 2;
TreeBuilder::TreeBuilder() : _gen(_rd()) { }
auto levelWidth = int_rand(_gen);
t->widthLevel.push_back(static_cast<int32_t>(levelWidth));
t->beginLevel.push_back(static_cast<int32_t>(t->opList.size()));
- t->endLevel.push_back(static_cast<int32_t>(t->opList.size() + levelWidth));
+ t->endLevel.push_back(static_cast<int32_t>(t->opList.size() + levelWidth - 1));
for (int32_t i = 0; i < levelWidth; i++) {
auto op = std::unique_ptr<Operation>(new Operation);
op->levelOwner = levelId;
- buildSketchOperation(t, op.get());
+ /**
+ * If the operation was not created, then repeat the creation.
+ */
+ if (!buildSketchOperation(t, op.get())) {
+ i--;
+ continue;
+ }
t->opList.push_back(std::move(op));
}
}
for (int32_t i = 0; i < t->widthLevel[0]; i++) {
auto op = std::unique_ptr<Operation>(new Operation);
op->levelOwner = 0;
- buildSketchOperation(t, op.get());
+ /**
+ * If the operation was not created, then repeat the creation.
+ */
+ if (!buildSketchOperation(t, op.get())) {
+ i--;
+ continue;
+ }
t->opList.push_back(std::move(op));
}
}
-void TreeBuilder::buildSketchOperation(Tree* t, Operation* op) {
+bool TreeBuilder::buildSketchOperation(Tree* t, Operation* op) {
std::uniform_int_distribution<int32_t> opcode_rand(static_cast<int32_t>(OpCodes::opFirst),
static_cast<int32_t>(OpCodes::opLast));
case OpCodes::opConv2d:
buildSketchConv2D(t, op);
break;
+ case OpCodes::opConcatenation:
+ buildSketchConcat(t, op);
+ break;
+ case OpCodes::opDepthwiseConv2d:
+ buildSketchDepthwiseConv2D(t, op);
+ break;
+ case OpCodes::opOpMaxPool2d:
+ case OpCodes::opAveragePool2d:
+ buildSketchPooling(t, op);
+ break;
+ case OpCodes::opSoftmax:
+ buildSketchSoftmax(t, op);
+ break;
+ case OpCodes::opFullyConnected:
+ /**
+ * Currently, we can create fullyconnected operation only on last level.
+ * @todo fix it.
+ */
+ if (t->beginLevel.size() != static_cast<size_t>(t->hTree)) {
+ return false;
+ }
+
+ buildSketchFullyConnected(t, op);
+ break;
default:
- assert(false && "Unsopported operation!");
+ assert(false && "TreeBuilder: Unsupported operation");
+ }
+
+ return true;
+}
+
+void TreeBuilder::buildSketchConv2D(Tree* t, Operation* op) {
+ std::uniform_int_distribution<int32_t> int_rand(0, INT32_MAX);
+
+ if (t->beginLevel.size() == 1) {
+ op->inputShape = t->inputShapeTree;
+ buildSketchConv2DForShape(op->inputShape, op);
+ return ;
+ }
+
+ auto levelId = int_rand(_gen) % (t->beginLevel.size() - 1);
+ auto opId = t->beginLevel[levelId] + (int_rand(_gen) % t->widthLevel[levelId]);
+
+ std::cout << "input level [ " << levelId << " ] operation id [ " << opId << " ]" << std::endl;
+
+ op->inputs.push_back(opId);
+ op->levelOwner = t->beginLevel.size() - 1;
+ op->inputShape = t->opList[opId]->outputShape;
+ buildSketchConv2DForShape(op->inputShape, op);
+}
+
+void TreeBuilder::buildSketchConcat(Tree* t, Operation* op) {
+ std::uniform_int_distribution<int32_t> int_rand(2, INT32_MAX);
+ auto axis = 1 + (int_rand(_gen) + 3) % 3;
+ auto input_cnt = concatCntInputsMin + int_rand(_gen) % concatCntInputsMax;
+
+ /* Special case if there are only one level (input to neural network) */
+ if (t->beginLevel.size() == 1) {
+ op->inputShape = t->inputShapeTree;
+ op->outputShape = op->inputShape;
+ for (int i = 0; i < input_cnt; i++) {
+ op->inputs.push_back(-1); /* -1 means that it is needed to specify amount inputs
+ * on the first level where input tensor for operation
+ * is a input tensor for neural network. */
+ addConcatInput(op->inputShape, axis, op);
+ }
+ op->inputShape[axis] = -1; /* specify a dimension for concatenation. */
+ return ;
+ }
+
+ /* Select the first operand */
+ auto levelId = int_rand(_gen) % (t->beginLevel.size() - 1);
+ auto opId = t->beginLevel[levelId] + (int_rand(_gen) % t->widthLevel[levelId]);
+ std::cout << "input level [ " << levelId << " ] operation id [ " << opId << " ]" << std::endl;
+
+ op->inputs.push_back(opId);
+ op->levelOwner = t->beginLevel.size() - 1;
+ op->inputShape = t->opList[opId]->outputShape;
+ op->outputShape = op->inputShape;
+ std::vector<int32_t> shape = op->inputShape;
+ shape[axis] = -1;
+ for (int i = 0; i < input_cnt; i++) {
+ opId = lookupConsistentOutput(t, op, shape, t->beginLevel.size() - 1);
+ op->inputs.push_back(opId);
+ addConcatInput(t->opList[opId]->outputShape, axis, op);
+ }
+
+ op->inputShape[axis] = -1; /* specify a dimension for concatenation. */
+}
+
+void TreeBuilder::buildSketchDepthwiseConv2D(Tree* t, Operation* op) {
+ std::uniform_int_distribution<int32_t> int_rand(1, INT32_MAX);
+ /**
+ * Currently, on the stage of building arbitrary tree it is enough
+ * build DepthwiseConv2D node as Conv2D.
+ * @todo: maby there is sense to specifically create OpDepthwiseConv2d.
+ */
+ buildSketchConv2D(t, op);
+
+ /**
+ * Then change the kernel's shape.
+ */
+ op->kernelShape[0] = int_rand(_gen) % depthwiseConv2dMultiply + 1; /* channel multiplier */
+ op->kernelShape[1] = op->inputShape[3]; /* filter height */
+ op->kernelShape[2] = op->inputShape[2]; /* filter width */
+ op->kernelShape[3] = op->inputShape[3]; /* input channels */
+ op->outputShape[3] = op->kernelShape[0] * op->kernelShape[3];
+}
+
+void TreeBuilder::buildSketchPooling(Tree* t, Operation* op) {
+ std::uniform_int_distribution<int32_t> int_rand(2, INT32_MAX);
+
+ if (t->beginLevel.size() == 1) {
+ op->inputShape = t->inputShapeTree;
+ op->outputShape = op->inputShape;
+ return ;
}
+
+ auto levelId = int_rand(_gen) % (t->beginLevel.size() - 1);
+ auto opId = t->beginLevel[levelId] + (int_rand(_gen) % t->widthLevel[levelId]);
+
+ std::cout << "input level [ " << levelId << " ] operation id [ " << opId << " ]" << std::endl;
+
+ op->inputs.push_back(opId);
+ op->levelOwner = t->beginLevel.size() - 1;
+ op->inputShape = t->opList[opId]->outputShape;
+ op->outputShape = op->inputShape;
+}
+
+void TreeBuilder::buildSketchSoftmax(Tree* t, Operation* op) {
+ /**
+ * We need only select input node, the output shape will be same as input.
+ * That is why we use pooling's builder.
+ */
+ buildSketchPooling(t, op);
+}
+
+void TreeBuilder::buildSketchFullyConnected(Tree* t, Operation* op) {
+ std::uniform_int_distribution<int32_t> int_rand(2, fullyConnectedMaxWeight);
+ /**
+ * 1. Select a input form previous nodes by means of buildSketchPooling
+ */
+ buildSketchPooling(t, op);
+
+ /**
+ * 2. Create a weights for fully connected layer.
+ */
+ op->kernelShape.resize(fullyConnectedKernelDim);
+ op->kernelShape[0] = int_rand(_gen);
+ op->kernelShape[1] = op->inputShape[0] * op->inputShape[1] *
+ op->inputShape[2] * op->inputShape[3];
+
+ op->outputShape.resize(2);
+ op->outputShape[0] = op->kernelShape[0];
+ op->outputShape[1] = op->kernelShape[1];
+}
+
+// =========== private ===========
+
+int32_t TreeBuilder::lookupConsistentOutput(Tree* t, Operation* op, std::vector<int32_t>& shape,
+ int32_t until_level) {
+ for (int i =0, j = 0; i < t->beginLevel[until_level]; i++) {
+ for (j = 0; j < 4; j++) {
+ if (shape[j] != t->opList[i]->outputShape[j] && shape[j] != -1) {
+ j = 0;
+ break;
+ }
+ }
+ if (j == 3 && std::find(op->inputs.begin(), op->inputs.end(), i) == op->inputs.end())
+ return i;
+ }
+
+ /*
+ * Help to code below (initialization new_op):
+ * Z = y->inputs
+ * / \
+ * Y new_op
+ * \ /
+ * op
+ */
+ const Operation* y = t->opList[op->inputs[0]].get();
+ std::unique_ptr<Operation> new_op = std::unique_ptr<Operation>(new Operation(*y));
+
+ /*
+ * reindex operations
+ */
+ auto inser_pos = t->beginLevel[y->levelOwner];
+ for (auto& in : op->inputs) {
+ if (in >= inser_pos)
+ in++;
+ }
+ for (int i = inser_pos; i < static_cast<int32_t>(t->opList.size()); i++) {
+ for (auto& in : t->opList[i]->inputs) {
+ if (in >= inser_pos) {
+ in++;
+ }
+ }
+ }
+
+ t->endLevel[y->levelOwner]++;
+ t->widthLevel[y->levelOwner]++;
+ for (int i = y->levelOwner + 1; i < static_cast<int32_t>(t->beginLevel.size()); i++) {
+ t->beginLevel[i]++;
+ t->endLevel[i]++;
+ }
+ t->opList.insert(t->opList.begin() + inser_pos, std::move(new_op));
+
+ return inser_pos;
}
void TreeBuilder::buildSketchConv2DForShape(std::vector<int32_t>& input_shape, Operation* op) {
std::cout << " " << i;
std::cout << " ]" << std::endl;
}
-void TreeBuilder::buildSketchConv2D(Tree* t, Operation* op) {
- /** `-2` because we consider all upper levels exclude the current */
- auto rand_max = static_cast<int32_t>(t->beginLevel.size()) - 2;
- std::uniform_int_distribution<int32_t> int_rand(0, rand_max);
- if (op->levelOwner == 0) {
- op->inputShape = t->inputShapeTree;
- buildSketchConv2DForShape(op->inputShape, op);
- return ;
+void TreeBuilder::addConcatInput(std::vector<int32_t>& input_shape, int32_t axis, Operation* op) {
+ for (int i = 0 ; i < 4; i++) {
+ if (input_shape[i] != op->inputShape[i] && i != axis)
+ assert(false && "Not consistency input shapes\n");
}
-
- auto levelId = int_rand(_gen);
- auto opId = int_rand(_gen) % t->widthLevel[levelId];
-
- op->inputs.push_back(opId);
- op->levelOwner = t->beginLevel.size() - 1;
- buildSketchConv2DForShape(t->opList[opId]->outputShape, op);
+ op->outputShape[axis] += input_shape[axis];
}
} // namespace treebuilder
enum class OpCodes {
opConv2d,
+ opConcatenation,
+ opDepthwiseConv2d,
+ opOpMaxPool2d,
+ opAveragePool2d,
+ opSoftmax,
+ opFullyConnected,
opCount,
opFirst = 0,
opLast = opCount - 1,
* @param beginLevel keeps the vector of operations id which are beginners for level [i].
* @param endLevel keeps the vector of operations id which are end for level [i].
* @param opList keeps the vector of all used operations.
+ *
+ * @details beginLevel is array of indexes to opList.
+ * for example: beginLevel[4] contains the id of first operation on the level 4.
+ * id is index for opList array.
*/
-
struct Tree {
int inputCnt;
int hTree;
void initTree(Tree* t);
void buildLevel(Tree* t);
- void buildSketchOperation(Tree* t, Operation* op);
+ bool buildSketchOperation(Tree* t, Operation* op);
/**
* @details Currently Conv2D are build with stride = 1.
* @param input_shape is the shape of input tensor.
* @param op is the pointer to created operation.
*/
- void buildSketchConv2DForShape(std::vector<int32_t>& input_shape, Operation* op);
void buildSketchConv2D(Tree* t, Operation* op);
-
+ void buildSketchConcat(Tree* t, Operation* op);
+ void buildSketchDepthwiseConv2D(Tree* t, Operation* op);
+ void buildSketchPooling(Tree* t, Operation* op);
+ void buildSketchSoftmax(Tree* t, Operation* op);
+ void buildSketchFullyConnected(Tree* t, Operation* op);
private:
+ /**
+ * @brief lookupConsistentOutput looks up an operation with a consistent output shape.
+ * @param t is the tree in which the search is done.
+ * @param op is the operation for which a producer operation is being looked up.
+ * @param shape is a shape template. A -1 entry in this vector is an ignored axis.
+ * For example, for <1, 64, 64, -1> lookupConsistentOutput looks up an
+ * operation with output <1, 64, 64, X>.
+ * @param until_level is the level starting from which the search is not performed.
+ *
+ * @details If no such operation is found, this method creates one
+ * and returns its id.
+ *
+ */
+ int32_t lookupConsistentOutput(Tree* t, Operation* op, std::vector<int32_t>& shape,
+ int32_t until_level);
+
+ void buildSketchConv2DForShape(std::vector<int32_t>& input_shape, Operation* op);
+ void addConcatInput(std::vector<int32_t>& input_shape, int32_t axis, Operation* op);
+
std::random_device _rd;
std::mt19937 _gen;
};