std::set<int> readedIndexes;
std::map<int, Mat> storages;
std::map<int, Mat> tensors;
+ // Stack of per-scope counts of not-yet-connected layers (one entry per Sequential, ConcatTable, etc.)
+ std::vector<int> numUnconnectedLayers;
struct Module
{
layerParams.set("inputDimension", scalarParams.get<int>("inputDimension"));
layerParams.set("outputDimension", scalarParams.get<int>("outputDimension"));
}
- if (nnName == "Concat")
- {
- layerParams.set("dimension", scalarParams.get<int>("dimension"));
- }
- if (nnName == "JoinTable")
- {
- layerParams.set("dimension", scalarParams.get<int>("dimension"));
- }
- if (nnName == "DepthConcat")
+ else if (nnName == "Concat" || nnName == "JoinTable" || nnName == "DepthConcat")
{
layerParams.set("dimension", scalarParams.get<int>("dimension"));
}
{
newId = fill(module->modules[i], addedModules, prevLayerId, prevOutNum);
}
+ numUnconnectedLayers.push_back(module->modules.size());
return newId;
}
else if (module->thName == "JoinTable") {
mergeId = net.addLayer(generateLayerName("torchMerge"), "Concat", mergeParams);
addedModules.push_back(std::make_pair(mergeId, module));
- for (int i = 0; i < ids.size(); i++)
+ // Connect only the most recently added group of unconnected layers (top of the stack).
+ CV_Assert(!numUnconnectedLayers.empty());
+ const int numInputs = numUnconnectedLayers.back();
+ numUnconnectedLayers.pop_back();
+ CV_Assert(numInputs <= ids.size());
+ for (int i = 0; i < numInputs; i++)
{
- net.connect(ids[i], 0, mergeId, i);
+ net.connect(ids[ids.size() - numInputs + i], 0, mergeId, i);
}
return mergeId;
int id = net.addLayer(name, "Eltwise", params);
- for (int i = 0; i < ids.size(); i++)
+ // Connect only the most recently added group of unconnected layers (top of the stack).
+ CV_Assert(!numUnconnectedLayers.empty());
+ const int numInputs = numUnconnectedLayers.back();
+ numUnconnectedLayers.pop_back();
+ CV_Assert(numInputs <= ids.size());
+ for (int i = 0; i < numInputs; i++)
{
- net.connect(ids[i], 0, id, i);
+ net.connect(ids[ids.size() - numInputs + i], 0, id, i);
}
addedModules.push_back(std::make_pair(id, module));