From: Yangqing Jia
Date: Fri, 26 Sep 2014 20:58:45 +0000 (-0700)
Subject: running factory.
X-Git-Tag: submit/tizen/20180823.020014~572^2~150^2~10
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=d316628f8dfffb6910fe8010b9c50085ba7196a8;p=platform%2Fupstream%2Fcaffeonacl.git

running factory.
---

diff --git a/Makefile b/Makefile
index 5020b41..243650b 100644
--- a/Makefile
+++ b/Makefile
@@ -20,7 +20,6 @@ endif
 # The target shared library and static library name
 LIB_BUILD_DIR := $(BUILD_DIR)/lib
 NAME := $(LIB_BUILD_DIR)/lib$(PROJECT).so
-STATIC_NAME := $(LIB_BUILD_DIR)/lib$(PROJECT).a
 
 ##############################
 # Get all source files
@@ -338,7 +337,7 @@ SUPERCLEAN_EXTS := .so .a .o .bin .testbin .pb.cc .pb.h _pb2.py .cuo
 	py mat py$(PROJECT) mat$(PROJECT) proto runtest \
 	superclean supercleanlist supercleanfiles warn everything
 
-all: $(NAME) $(STATIC_NAME) tools examples
+all: $(NAME) tools examples
 
 everything: all py$(PROJECT) mat$(PROJECT) test warn lint runtest
 
@@ -387,16 +386,16 @@ py$(PROJECT): py
 
 py: $(PY$(PROJECT)_SO) $(PROTO_GEN_PY)
 
-$(PY$(PROJECT)_SO): $(STATIC_NAME) $(PY$(PROJECT)_SRC) $(PY$(PROJECT)_HXX_SRC)
+$(PY$(PROJECT)_SO): $(OBJS) $(PY$(PROJECT)_SRC) $(PY$(PROJECT)_HXX_SRC)
 	$(CXX) -shared -o $@ $(PY$(PROJECT)_SRC) \
-		$(STATIC_NAME) $(LINKFLAGS) $(PYTHON_LDFLAGS)
+		$(OBJS) $(LINKFLAGS) $(PYTHON_LDFLAGS)
 	@ echo
 
 mat$(PROJECT): mat
 
 mat: $(MAT$(PROJECT)_SO)
 
-$(MAT$(PROJECT)_SO): $(MAT$(PROJECT)_SRC) $(STATIC_NAME)
+$(MAT$(PROJECT)_SO): $(MAT$(PROJECT)_SRC) $(OBJS)
 	@ if [ -z "$(MATLAB_DIR)" ]; then \
 		echo "MATLAB_DIR must be specified in $(CONFIG_FILE)" \
 			"to build mat$(PROJECT)."; \
@@ -405,7 +404,7 @@ $(MAT$(PROJECT)_SO): $(MAT$(PROJECT)_SRC) $(STATIC_NAME)
 	$(MATLAB_DIR)/bin/mex $(MAT$(PROJECT)_SRC) \
 		CXX="$(CXX)" \
 		CXXFLAGS="\$$CXXFLAGS $(MATLAB_CXXFLAGS)" \
-		CXXLIBS="\$$CXXLIBS $(STATIC_NAME) $(LDFLAGS)" -output $@
+		CXXLIBS="\$$CXXLIBS $(OBJS) $(LDFLAGS)" -output $@
 	@ echo
 
 runtest: $(TEST_ALL_BIN)
@@ -443,14 +442,10 @@ $(BUILD_DIR)/.linked:
 $(ALL_BUILD_DIRS): | $(BUILD_DIR_LINK)
 	@ mkdir -p $@
 
-$(NAME): $(PROTO_OBJS) $(OBJS) | $(LIB_BUILD_DIR)
+$(NAME): $(OBJS) | $(LIB_BUILD_DIR)
 	$(CXX) -shared -o $@ $(OBJS) $(LINKFLAGS) $(LDFLAGS)
 	@ echo
 
-$(STATIC_NAME): $(PROTO_OBJS) $(OBJS) | $(LIB_BUILD_DIR)
-	ar rcs $@ $(PROTO_OBJS) $(OBJS)
-	@ echo
-
 $(TEST_BUILD_DIR)/%.o: src/$(PROJECT)/test/%.cpp $(HXX_SRCS) $(TEST_HXX_SRCS) \
 		| $(TEST_BUILD_DIR)
 	$(CXX) $< $(CXXFLAGS) -c -o $@ 2> $@.$(WARNS_EXT) \
@@ -465,21 +460,21 @@ $(TEST_BUILD_DIR)/%.cuo: src/$(PROJECT)/test/%.cu $(HXX_SRCS) $(TEST_HXX_SRCS) \
 	@ cat $@.$(WARNS_EXT)
 	@ echo
 
-$(TEST_ALL_BIN): $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) $(STATIC_NAME) \
+$(TEST_ALL_BIN): $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) $(OBJS) \
 		| $(TEST_BIN_DIR)
-	$(CXX) $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) $(STATIC_NAME) \
+	$(CXX) $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) $(OBJS) \
 		-o $@ $(LINKFLAGS) $(LDFLAGS)
 	@ echo
 
-$(TEST_CU_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_BUILD_DIR)/%.cuo $(GTEST_OBJ) $(STATIC_NAME) \
+$(TEST_CU_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_BUILD_DIR)/%.cuo $(GTEST_OBJ) $(OBJS) \
 		| $(TEST_BIN_DIR)
-	$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) $(STATIC_NAME) \
+	$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) $(OBJS) \
 		-o $@ $(LINKFLAGS) $(LDFLAGS)
 	@ echo
 
-$(TEST_CXX_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_BUILD_DIR)/%.o $(GTEST_OBJ) $(STATIC_NAME) \
+$(TEST_CXX_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_BUILD_DIR)/%.o $(GTEST_OBJ) $(OBJS) \
 		| $(TEST_BIN_DIR)
-	$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) $(STATIC_NAME) \
+	$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) $(OBJS) \
 		-o $@ $(LINKFLAGS) $(LDFLAGS)
 	@ echo
 
@@ -488,12 +483,12 @@ $(TOOL_BUILD_DIR)/%: $(TOOL_BUILD_DIR)/%.bin | $(TOOL_BUILD_DIR)
 	@ $(RM) $@
 	@ ln -s $(abspath $<) $@
 
-$(TOOL_BINS): %.bin : %.o $(STATIC_NAME)
-	$(CXX) $< $(STATIC_NAME) -o $@ $(LINKFLAGS) $(LDFLAGS)
+$(TOOL_BINS): %.bin : %.o $(OBJS)
+	$(CXX) $< $(OBJS) -o $@ $(LINKFLAGS) $(LDFLAGS)
 	@ echo
 
-$(EXAMPLE_BINS): %.bin : %.o $(STATIC_NAME)
-	$(CXX) $< $(STATIC_NAME) -o $@ $(LINKFLAGS) $(LDFLAGS)
+$(EXAMPLE_BINS): %.bin : %.o $(OBJS)
+	$(CXX) $< $(OBJS) -o $@ $(LINKFLAGS) $(LDFLAGS)
 	@ echo
 
 $(LAYER_BUILD_DIR)/%.o: src/$(PROJECT)/layers/%.cpp $(HXX_SRCS) \
@@ -612,6 +607,5 @@ $(DISTRIBUTE_DIR): all py $(HXX_SRCS) | $(DISTRIBUTE_SUBDIRS)
 	cp $(EXAMPLE_BINS) $(DISTRIBUTE_DIR)/bin
 	# add libraries
 	cp $(NAME) $(DISTRIBUTE_DIR)/lib
-	cp $(STATIC_NAME) $(DISTRIBUTE_DIR)/lib
 	# add python - it's not the standard way, indeed...
 	cp -r python $(DISTRIBUTE_DIR)/python
diff --git a/include/caffe/layer.hpp b/include/caffe/layer.hpp
index 18ff274..8a8330b 100644
--- a/include/caffe/layer.hpp
+++ b/include/caffe/layer.hpp
@@ -7,6 +7,7 @@
 
 #include "caffe/blob.hpp"
 #include "caffe/common.hpp"
+#include "caffe/layer_factory.hpp"
 #include "caffe/proto/caffe.pb.h"
 #include "caffe/util/device_alternate.hpp"
 
@@ -469,10 +470,6 @@ void Layer<Dtype>::ToProto(LayerParameter* param, bool write_diff) {
   }
 }
 
-// The layer factory function
-template <typename Dtype>
-Layer<Dtype>* GetLayer(const LayerParameter& param);
-
 }  // namespace caffe
 
 #endif  // CAFFE_LAYER_H_
diff --git a/include/caffe/layer_factory.hpp b/include/caffe/layer_factory.hpp
new file mode 100644
index 0000000..08e3e6d
--- /dev/null
+++ b/include/caffe/layer_factory.hpp
@@ -0,0 +1,85 @@
+#ifndef CAFFE_LAYER_FACTORY_H_
+#define CAFFE_LAYER_FACTORY_H_
+
+#include <map>
+
+#include "caffe/common.hpp"
+#include "caffe/proto/caffe.pb.h"
+
+namespace caffe {
+
+template <typename Dtype>
+class Layer;
+
+template <typename Dtype>
+class LayerRegistry {
+ public:
+  typedef Layer<Dtype>* (*Creator)(const LayerParameter&);
+  typedef std::map<LayerParameter_LayerType, Creator> CreatorRegistry;
+
+  // Adds a creator.
+  static void AddCreator(const LayerParameter_LayerType& type,
+                         Creator creator) {
+    CHECK_EQ(registry_.count(type), 0)
+        << "Layer type " << type << " already registered.";
+    registry_[type] = creator;
+  }
+
+  // Get a layer using a LayerParameter.
+  static Layer<Dtype>* CreateLayer(const LayerParameter& param) {
+    LOG(INFO) << "Creating layer " << param.name();
+    const LayerParameter_LayerType& type = param.type();
+    CHECK_EQ(registry_.count(type), 1);
+    return registry_[type](param);
+  }
+
+ private:
+  // Layer registry should never be instantiated - everything is done with its
+  // static variables.
+  LayerRegistry() {}
+  static CreatorRegistry registry_;
+};
+
+// Static variables for the templated layer factory registry.
+template <typename Dtype>
+typename LayerRegistry<Dtype>::CreatorRegistry LayerRegistry<Dtype>::registry_;
+
+template <typename Dtype>
+class LayerRegisterer {
+ public:
+  LayerRegisterer(const LayerParameter_LayerType& type,
+                  Layer<Dtype>* (*creator)(const LayerParameter&)) {
+    LOG(INFO) << "Registering layer type: " << type;
+    LayerRegistry<Dtype>::AddCreator(type, creator);
+  }
+};
+
+
+#define REGISTER_LAYER_CREATOR(type, creator, classname)                       \
+  static LayerRegisterer<float> g_creator_f_##classname(type, creator<float>); \
+  static LayerRegisterer<double> g_creator_d_##classname(type, creator<double>)
+
+#define REGISTER_LAYER_CLASS(type, clsname)                                    \
+  template <typename Dtype>                                                    \
+  Layer<Dtype>* Creator_##clsname(const LayerParameter& param) {               \
+    return new clsname<Dtype>(param);                                          \
+  }                                                                            \
+  static LayerRegisterer<float> g_creator_f_##clsname(                         \
+      type, Creator_##clsname<float>);                                         \
+  static LayerRegisterer<double> g_creator_d_##clsname(                        \
+      type, Creator_##clsname<double>)
+
+// A function to get a specific layer from the specification given in
+// LayerParameter. Ideally this would be replaced by a factory pattern,
+// but we will leave it this way for now.
+// Yangqing's note: With LayerRegistry, we no longer need this thin wrapper any
+// more. It is provided here for backward compatibility and should be removed in
+// the future.
+template <typename Dtype>
+Layer<Dtype>* GetLayer(const LayerParameter& param) {
+  return LayerRegistry<Dtype>::CreateLayer(param);
+}
+
+}  // namespace caffe
+
+#endif  // CAFFE_LAYER_FACTORY_H_
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
index 5661629..79a732b 100644
--- a/src/caffe/layer_factory.cpp
+++ b/src/caffe/layer_factory.cpp
@@ -1,6 +1,7 @@
 #include <string>
 
 #include "caffe/layer.hpp"
+#include "caffe/layer_factory.hpp"
 #include "caffe/proto/caffe.pb.h"
 #include "caffe/vision_layers.hpp"
 
@@ -11,7 +12,7 @@ namespace caffe {
 
 // Get convolution layer according to engine.
 template <typename Dtype>
-ConvolutionLayer<Dtype>* GetConvolutionLayer(const string& name,
+Layer<Dtype>* GetConvolutionLayer(
     const LayerParameter& param) {
   ConvolutionParameter_Engine engine = param.convolution_param().engine();
   if (engine == ConvolutionParameter_Engine_DEFAULT) {
@@ -27,21 +28,14 @@ ConvolutionLayer<Dtype>* GetConvolutionLayer(const string& name,
     return new CuDNNConvolutionLayer<Dtype>(param);
 #endif
   } else {
-    LOG(FATAL) << "Layer " << name << " has unknown engine.";
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
   }
 }
 
-template ConvolutionLayer<float>* GetConvolutionLayer(const string& name,
-    const LayerParameter& param);
-template ConvolutionLayer<double>* GetConvolutionLayer(const string& name,
-    const LayerParameter& param);
-
 // Get pooling layer according to engine.
 template <typename Dtype>
-PoolingLayer<Dtype>* GetPoolingLayer(const string& name,
-    const LayerParameter& param) {
-  const PoolingParameter& p_param = param.pooling_param();
-  PoolingParameter_Engine engine = p_param.engine();
+Layer<Dtype>* GetPoolingLayer(const LayerParameter& param) {
+  PoolingParameter_Engine engine = param.pooling_param().engine();
   if (engine == PoolingParameter_Engine_DEFAULT) {
     engine = PoolingParameter_Engine_CAFFE;
 #ifdef USE_CUDNN
@@ -60,19 +54,13 @@ PoolingLayer<Dtype>* GetPoolingLayer(const string& name,
     return new CuDNNPoolingLayer<Dtype>(param);
 #endif
   } else {
-    LOG(FATAL) << "Layer " << name << " has unknown engine.";
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
   }
 }
 
-template PoolingLayer<float>* GetPoolingLayer(const string& name,
-    const LayerParameter& param);
-template PoolingLayer<double>* GetPoolingLayer(const string& name,
-    const LayerParameter& param);
-
 // Get relu layer according to engine.
 template <typename Dtype>
-ReLULayer<Dtype>* GetReLULayer(const string& name,
-    const LayerParameter& param) {
+Layer<Dtype>* GetReLULayer(const LayerParameter& param) {
   ReLUParameter_Engine engine = param.relu_param().engine();
   if (engine == ReLUParameter_Engine_DEFAULT) {
     engine = ReLUParameter_Engine_CAFFE;
@@ -87,19 +75,13 @@ ReLULayer<Dtype>* GetReLULayer(const string& name,
     return new CuDNNReLULayer<Dtype>(param);
 #endif
   } else {
-    LOG(FATAL) << "Layer " << name << " has unknown engine.";
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
   }
 }
 
-template ReLULayer<float>* GetReLULayer(const string& name,
-    const LayerParameter& param);
-template ReLULayer<double>* GetReLULayer(const string& name,
-    const LayerParameter& param);
-
 // Get sigmoid layer according to engine.
 template <typename Dtype>
-SigmoidLayer<Dtype>* GetSigmoidLayer(const string& name,
-    const LayerParameter& param) {
+Layer<Dtype>* GetSigmoidLayer(const LayerParameter& param) {
   SigmoidParameter_Engine engine = param.sigmoid_param().engine();
   if (engine == SigmoidParameter_Engine_DEFAULT) {
     engine = SigmoidParameter_Engine_CAFFE;
@@ -114,19 +96,13 @@ SigmoidLayer<Dtype>* GetSigmoidLayer(const string& name,
     return new CuDNNSigmoidLayer<Dtype>(param);
 #endif
   } else {
-    LOG(FATAL) << "Layer " << name << " has unknown engine.";
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
   }
 }
 
-template SigmoidLayer<float>* GetSigmoidLayer(const string& name,
-    const LayerParameter& param);
-template SigmoidLayer<double>* GetSigmoidLayer(const string& name,
-    const LayerParameter& param);
-
 // Get tanh layer according to engine.
 template <typename Dtype>
-TanHLayer<Dtype>* GetTanHLayer(const string& name,
-    const LayerParameter& param) {
+Layer<Dtype>* GetTanHLayer(const LayerParameter& param) {
   TanHParameter_Engine engine = param.tanh_param().engine();
   if (engine == TanHParameter_Engine_DEFAULT) {
     engine = TanHParameter_Engine_CAFFE;
@@ -141,19 +117,13 @@ TanHLayer<Dtype>* GetTanHLayer(const string& name,
     return new CuDNNTanHLayer<Dtype>(param);
 #endif
   } else {
-    LOG(FATAL) << "Layer " << name << " has unknown engine.";
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
  }
 }
 
-template TanHLayer<float>* GetTanHLayer(const string& name,
-    const LayerParameter& param);
-template TanHLayer<double>* GetTanHLayer(const string& name,
-    const LayerParameter& param);
-
 // Get softmax layer according to engine.
 template <typename Dtype>
-SoftmaxLayer<Dtype>* GetSoftmaxLayer(const string& name,
-    const LayerParameter& param) {
+Layer<Dtype>* GetSoftmaxLayer(const LayerParameter& param) {
   SoftmaxParameter_Engine engine = param.softmax_param().engine();
   if (engine == SoftmaxParameter_Engine_DEFAULT) {
     engine = SoftmaxParameter_Engine_CAFFE;
@@ -168,107 +138,58 @@ SoftmaxLayer<Dtype>* GetSoftmaxLayer(const string& name,
     return new CuDNNSoftmaxLayer<Dtype>(param);
 #endif
   } else {
-    LOG(FATAL) << "Layer " << name << " has unknown engine.";
-  }
-}
-
-template SoftmaxLayer<float>* GetSoftmaxLayer(const string& name,
-    const LayerParameter& param);
-template SoftmaxLayer<double>* GetSoftmaxLayer(const string& name,
-    const LayerParameter& param);
-
-// A function to get a specific layer from the specification given in
-// LayerParameter. Ideally this would be replaced by a factory pattern,
-// but we will leave it this way for now.
-template <typename Dtype>
-Layer<Dtype>* GetLayer(const LayerParameter& param) {
-  const string& name = param.name();
-  const LayerParameter_LayerType& type = param.type();
-  switch (type) {
-  case LayerParameter_LayerType_ACCURACY:
-    return new AccuracyLayer<Dtype>(param);
-  case LayerParameter_LayerType_ABSVAL:
-    return new AbsValLayer<Dtype>(param);
-  case LayerParameter_LayerType_ARGMAX:
-    return new ArgMaxLayer<Dtype>(param);
-  case LayerParameter_LayerType_BNLL:
-    return new BNLLLayer<Dtype>(param);
-  case LayerParameter_LayerType_CONCAT:
-    return new ConcatLayer<Dtype>(param);
-  case LayerParameter_LayerType_CONTRASTIVE_LOSS:
-    return new ContrastiveLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_CONVOLUTION:
-    return GetConvolutionLayer<Dtype>(name, param);
-  case LayerParameter_LayerType_DATA:
-    return new DataLayer<Dtype>(param);
-  case LayerParameter_LayerType_DROPOUT:
-    return new DropoutLayer<Dtype>(param);
-  case LayerParameter_LayerType_DUMMY_DATA:
-    return new DummyDataLayer<Dtype>(param);
-  case LayerParameter_LayerType_EUCLIDEAN_LOSS:
-    return new EuclideanLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_ELTWISE:
-    return new EltwiseLayer<Dtype>(param);
-  case LayerParameter_LayerType_FLATTEN:
-    return new FlattenLayer<Dtype>(param);
-  case LayerParameter_LayerType_HDF5_DATA:
-    return new HDF5DataLayer<Dtype>(param);
-  case LayerParameter_LayerType_HDF5_OUTPUT:
-    return new HDF5OutputLayer<Dtype>(param);
-  case LayerParameter_LayerType_HINGE_LOSS:
-    return new HingeLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_IMAGE_DATA:
-    return new ImageDataLayer<Dtype>(param);
-  case LayerParameter_LayerType_IM2COL:
-    return new Im2colLayer<Dtype>(param);
-  case LayerParameter_LayerType_INFOGAIN_LOSS:
-    return new InfogainLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_INNER_PRODUCT:
-    return new InnerProductLayer<Dtype>(param);
-  case LayerParameter_LayerType_LRN:
-    return new LRNLayer<Dtype>(param);
-  case LayerParameter_LayerType_MEMORY_DATA:
-    return new MemoryDataLayer<Dtype>(param);
-  case LayerParameter_LayerType_MVN:
-    return new MVNLayer<Dtype>(param);
-  case LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS:
-    return new MultinomialLogisticLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_POOLING:
-    return GetPoolingLayer<Dtype>(name, param);
-  case LayerParameter_LayerType_POWER:
-    return new PowerLayer<Dtype>(param);
-  case LayerParameter_LayerType_RELU:
-    return GetReLULayer<Dtype>(name, param);
-  case LayerParameter_LayerType_SILENCE:
-    return new SilenceLayer<Dtype>(param);
-  case LayerParameter_LayerType_SIGMOID:
-    return GetSigmoidLayer<Dtype>(name, param);
-  case LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS:
-    return new SigmoidCrossEntropyLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_SLICE:
-    return new SliceLayer<Dtype>(param);
-  case LayerParameter_LayerType_SOFTMAX:
-    return GetSoftmaxLayer<Dtype>(name, param);
-  case LayerParameter_LayerType_SOFTMAX_LOSS:
-    return new SoftmaxWithLossLayer<Dtype>(param);
-  case LayerParameter_LayerType_SPLIT:
-    return new SplitLayer<Dtype>(param);
-  case LayerParameter_LayerType_TANH:
-    return GetTanHLayer<Dtype>(name, param);
-  case LayerParameter_LayerType_WINDOW_DATA:
-    return new WindowDataLayer<Dtype>(param);
-  case LayerParameter_LayerType_THRESHOLD:
-    return new ThresholdLayer<Dtype>(param);
-  case LayerParameter_LayerType_NONE:
-    LOG(FATAL) << "Layer " << name << " has unspecified type.";
-  default:
-    LOG(FATAL) << "Layer " << name << " has unknown type " << type;
+    LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
   }
-  // just to suppress old compiler warnings.
-  return (Layer<Dtype>*)(NULL);
 }
 
-template Layer<float>* GetLayer(const LayerParameter& param);
-template Layer<double>* GetLayer(const LayerParameter& param);
+// Layers that have a specific creator function.
+REGISTER_LAYER_CREATOR(LayerParameter_LayerType_CONVOLUTION,
+    GetConvolutionLayer, ConvolutionLayer);
+REGISTER_LAYER_CREATOR(LayerParameter_LayerType_POOLING,
+    GetPoolingLayer, PoolingLayer);
+REGISTER_LAYER_CREATOR(LayerParameter_LayerType_RELU,
+    GetReLULayer, ReLULayer);
+REGISTER_LAYER_CREATOR(LayerParameter_LayerType_SIGMOID,
+    GetSigmoidLayer, SigmoidLayer);
+REGISTER_LAYER_CREATOR(LayerParameter_LayerType_SOFTMAX,
+    GetSoftmaxLayer, SoftmaxLayer);
+REGISTER_LAYER_CREATOR(LayerParameter_LayerType_TANH,
+    GetTanHLayer, TanHLayer);
+// Layers that use their constructor as their default creator.
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_ACCURACY, AccuracyLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_ABSVAL, AbsValLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_ARGMAX, ArgMaxLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_BNLL, BNLLLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_CONCAT, ConcatLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_CONTRASTIVE_LOSS,
+    ContrastiveLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_DATA, DataLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_DROPOUT, DropoutLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_DUMMY_DATA, DummyDataLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_EUCLIDEAN_LOSS,
+    EuclideanLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_ELTWISE, EltwiseLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_FLATTEN, FlattenLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_HDF5_DATA, HDF5DataLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_HDF5_OUTPUT, HDF5OutputLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_HINGE_LOSS, HingeLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_IMAGE_DATA, ImageDataLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_IM2COL, Im2colLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_INFOGAIN_LOSS, InfogainLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_INNER_PRODUCT, InnerProductLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_LRN, LRNLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_MEMORY_DATA, MemoryDataLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_MVN, MVNLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS,
+    MultinomialLogisticLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_POWER, PowerLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_SILENCE, SilenceLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS,
+    SigmoidCrossEntropyLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_SLICE, SliceLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_SOFTMAX_LOSS,
+    SoftmaxWithLossLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_SPLIT, SplitLayer);
+REGISTER_LAYER_CLASS(LayerParameter_LayerType_WINDOW_DATA, WindowDataLayer);
 
 }  // namespace caffe
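
Note on the pattern this patch introduces (not part of the patch itself): the new layer_factory.hpp replaces the hand-written switch in GetLayer() with a registry. Each layer type maps to a creator function; static LayerRegisterer objects fill the map before main() runs, and LayerRegistry<Dtype>::CreateLayer() looks the type up when a layer is requested, with GetLayer() kept only as a thin wrapper. The Makefile half of the patch drops libcaffe.a and links $(OBJS) directly into the tools, tests and bindings; the commit message does not say why, but a plausible motivation is that linking from a static archive lets the linker skip object files nothing references, which would silently drop the static registrar objects and leave the registry empty.

The program below is a minimal, self-contained approximation of the same technique, not Caffe code: LayerType, LayerParam and ToyLayer stand in for LayerParameter_LayerType, LayerParameter and Layer<Dtype>, and REGISTER_TOY_LAYER mirrors REGISTER_LAYER_CLASS.

// Standalone sketch of the self-registering factory pattern used by the patch.
#include <cassert>
#include <iostream>
#include <map>
#include <string>

enum LayerType { RELU, SOFTMAX };                          // stand-in for LayerParameter_LayerType
struct LayerParam { LayerType type; std::string name; };   // stand-in for LayerParameter

struct ToyLayer {                                          // stand-in for Layer<Dtype>
  virtual ~ToyLayer() {}
  virtual const char* kind() const = 0;
};

class Registry {                                           // stand-in for LayerRegistry<Dtype>
 public:
  typedef ToyLayer* (*Creator)(const LayerParam&);
  static void AddCreator(LayerType type, Creator creator) {
    assert(table().count(type) == 0 && "type already registered");
    table()[type] = creator;
  }
  static ToyLayer* CreateLayer(const LayerParam& param) {
    assert(table().count(param.type) == 1 && "type not registered");
    return table()[param.type](param);
  }
 private:
  // Function-local static keeps the map valid whenever a registrar runs.
  static std::map<LayerType, Creator>& table() {
    static std::map<LayerType, Creator> t;
    return t;
  }
};

struct Registerer {                                        // stand-in for LayerRegisterer<Dtype>
  Registerer(LayerType type, Registry::Creator creator) {
    Registry::AddCreator(type, creator);
  }
};

// Stand-in for REGISTER_LAYER_CLASS: the layer's constructor is its creator.
#define REGISTER_TOY_LAYER(type, cls)                                      \
  ToyLayer* Creator_##cls(const LayerParam& p) { return new cls(p); }     \
  static Registerer g_creator_##cls(type, Creator_##cls)

struct ReLUToy : ToyLayer {
  explicit ReLUToy(const LayerParam&) {}
  const char* kind() const { return "relu"; }
};
struct SoftmaxToy : ToyLayer {
  explicit SoftmaxToy(const LayerParam&) {}
  const char* kind() const { return "softmax"; }
};

REGISTER_TOY_LAYER(RELU, ReLUToy);          // runs before main(), like the
REGISTER_TOY_LAYER(SOFTMAX, SoftmaxToy);    // patch's static g_creator_* objects

int main() {
  LayerParam p;
  p.type = SOFTMAX;
  p.name = "prob";
  ToyLayer* layer = Registry::CreateLayer(p);               // analogous to GetLayer(param)
  std::cout << p.name << " -> " << layer->kind() << "\n";   // prints "prob -> softmax"
  delete layer;
  return 0;
}

Design note: the sketch keeps the creator table in a function-local static, a common way to avoid static-initialization-order issues; the patch instead defines registry_ as a templated static data member in the header, which multiple translation units may share because it is a template.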