From 42512242cc30d7aaf2f5533cd1ab4e43a135d59a Mon Sep 17 00:00:00 2001
From: Sebastian Messmer
Date: Thu, 28 Feb 2019 14:03:34 -0800
Subject: [PATCH] refactor caffe2 operator constructors - 4/9 (#17085)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/17085

clangr codemod

Reviewed By: ezyang

Differential Revision: D14078515

fbshipit-source-id: aaa48ae10892e3f47063f2133e026fea46f3240b
---
 caffe2/operators/group_norm_op.h                    | 10 +++++---
 caffe2/operators/gru_unit_op.h                      | 30 ++++++++++------------
 caffe2/operators/h_softmax_op.h                     | 18 ++++++++-----
 caffe2/operators/half_float_ops.h                   | 10 +++++---
 caffe2/operators/heatmap_max_keypoint_op.h          |  5 ++--
 caffe2/operators/if_op.h                            |  2 +-
 caffe2/operators/im2col_op.h                        | 10 +++++---
 caffe2/operators/index_hash_ops.h                   |  5 ++--
 caffe2/operators/index_ops.cc                       | 29 ++++++++++++---------
 caffe2/operators/instance_norm_op.h                 | 10 +++++---
 caffe2/operators/integral_image_op.h                | 10 +++++---
 caffe2/operators/key_split_ops.h                    |  5 ++--
 caffe2/operators/last_n_window_collector.cc         |  5 ++--
 caffe2/operators/layer_norm_op.h                    |  5 ++--
 caffe2/operators/leaky_relu_op.h                    | 10 +++++---
 caffe2/operators/length_split_op.h                  |  5 ++--
 caffe2/operators/lengths_pad_op.h                   |  5 ++--
 caffe2/operators/lengths_reducer_ops.h              |  5 ++--
 caffe2/operators/lengths_top_k_op.h                 | 12 ++++++---
 caffe2/operators/listwise_l2r_op.h                  |  5 ++--
 caffe2/operators/load_save_op.h                     |  8 +++---
 caffe2/operators/local_response_normalization_op.h  | 15 ++++++-----
 .../local_response_normalization_op_cudnn.cc        | 10 +++++---
 caffe2/operators/locally_connected_op.h             | 10 +++++---
 caffe2/operators/logit_op.h                         |  5 ++--
 25 files changed, 142 insertions(+), 102 deletions(-)

diff --git a/caffe2/operators/group_norm_op.h b/caffe2/operators/group_norm_op.h
index 0af276f..826eb17 100644
--- a/caffe2/operators/group_norm_op.h
+++ b/caffe2/operators/group_norm_op.h
@@ -18,8 +18,9 @@ class GroupNormOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  GroupNormOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit GroupNormOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         OP_SINGLE_ARG(int, "group", group_, 32),
         OP_SINGLE_ARG(float, "epsilon", epsilon_, 1e-5),
         order_(StringToStorageOrder(
@@ -195,8 +196,9 @@ class GroupNormGradientOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  GroupNormGradientOp(const OperatorDef& def, Workspace* ws)
-      : Operator<Context>(def, ws),
+  template <class... Args>
+  explicit GroupNormGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         OP_SINGLE_ARG(int, "group", group_, 32),
         order_(StringToStorageOrder(
             this->template GetSingleArgument<std::string>("order", "NCHW"))) {
diff --git a/caffe2/operators/gru_unit_op.h b/caffe2/operators/gru_unit_op.h
index 6bb205a..721b882 100644
--- a/caffe2/operators/gru_unit_op.h
+++ b/caffe2/operators/gru_unit_op.h
@@ -112,14 +112,13 @@ void GRUUnitGradient(
 template <typename T, typename Context>
 class GRUUnitOp : public Operator<Context> {
  public:
-  GRUUnitOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
-        drop_states_(this->template GetSingleArgument<bool>(
-            "drop_states",
-            false)),
-        sequence_lengths_(this->template GetSingleArgument<bool>(
-            "sequence_lengths",
-            true)) {}
+  template <class... Args>
+  explicit GRUUnitOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
+        drop_states_(
+            this->template GetSingleArgument<bool>("drop_states", false)),
+        sequence_lengths_(
+            this->template GetSingleArgument<bool>("sequence_lengths", true)) {}
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
   bool RunOnDevice() override {
@@ -168,14 +167,13 @@ class GRUUnitOp : public Operator<Context> {
 template <typename T, typename Context>
 class GRUUnitGradientOp : public Operator<Context> {
  public:
-  GRUUnitGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
-        drop_states_(this->template GetSingleArgument<bool>(
-            "drop_states",
-            false)),
-        sequence_lengths_(this->template GetSingleArgument<bool>(
-            "sequence_lengths",
-            true)) {}
+  template <class... Args>
+  explicit GRUUnitGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
+        drop_states_(
+            this->template GetSingleArgument<bool>("drop_states", false)),
+        sequence_lengths_(
+            this->template GetSingleArgument<bool>("sequence_lengths", true)) {}
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
   bool RunOnDevice() override {
diff --git a/caffe2/operators/h_softmax_op.h b/caffe2/operators/h_softmax_op.h
index c2bd26b..a7c805d 100644
--- a/caffe2/operators/h_softmax_op.h
+++ b/caffe2/operators/h_softmax_op.h
@@ -14,8 +14,9 @@ template <typename T, class Context>
 class HSoftmaxOpBase : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  HSoftmaxOpBase(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws) {
+  template <class... Args>
+  explicit HSoftmaxOpBase(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...) {
     HierarchyProto hierarchy;
     CAFFE_ENFORCE(hierarchy.ParseFromString(
         this->template GetSingleArgument<std::string>("hierarchy", "")));
@@ -114,8 +115,9 @@ template <typename T, class Context>
 class HSoftmaxSearchOp final : public HSoftmaxOp<T, Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  HSoftmaxSearchOp(const OperatorDef& operator_def, Workspace* ws)
-      : HSoftmaxOp<T, Context>(operator_def, ws),
+  template <class... Args>
+  explicit HSoftmaxSearchOp(Args&&... args)
+      : HSoftmaxOp<T, Context>(std::forward<Args>(args)...),
         top_n_(this->template GetSingleArgument<int>("topN", 5)),
         beam_(this->template GetSingleArgument<float>("beam", 0.01f)) {
     CAFFE_ENFORCE(tree_.ParseFromString(
@@ -146,9 +148,11 @@ template <typename T, class Context>
 class HuffmanTreeHierarchyOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  HuffmanTreeHierarchyOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
-        num_classes_(this->template GetSingleArgument<int>("num_classes", -1)) {}
+  template <class... Args>
+  explicit HuffmanTreeHierarchyOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
+        num_classes_(this->template GetSingleArgument<int>("num_classes", -1)) {
+  }
   bool RunOnDevice() override;
 
  private:
diff --git a/caffe2/operators/half_float_ops.h b/caffe2/operators/half_float_ops.h
index a7c0dbe..2d9a2b4 100644
--- a/caffe2/operators/half_float_ops.h
+++ b/caffe2/operators/half_float_ops.h
@@ -26,8 +26,9 @@ class HalfToFloatOp : public Operator<Context> {
 
 class Float16ConstantFillOp : public Operator<CPUContext> {
  public:
-  Float16ConstantFillOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws),
+  template <class... Args>
+  explicit Float16ConstantFillOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...),
         shape_(this->template GetRepeatedArgument<int64_t>("shape")) {}
 
   USE_OPERATOR_FUNCTIONS(CPUContext);
@@ -41,8 +42,9 @@ class Float16ConstantFillOp : public Operator<CPUContext> {
 
 class Float16UniformFillOp : public Operator<CPUContext> {
  public:
-  Float16UniformFillOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws),
+  template <class... Args>
+  explicit Float16UniformFillOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...),
         shape_(this->template GetRepeatedArgument<int64_t>("shape")),
         min_(this->template GetSingleArgument<float>("min", 0)),
         max_(this->template GetSingleArgument<float>("max", 1)) {
diff --git a/caffe2/operators/heatmap_max_keypoint_op.h b/caffe2/operators/heatmap_max_keypoint_op.h
index 3a612ee..452daf2 100644
--- a/caffe2/operators/heatmap_max_keypoint_op.h
+++ b/caffe2/operators/heatmap_max_keypoint_op.h
@@ -13,8 +13,9 @@ namespace caffe2 {
 template <typename T, class Context>
 class HeatmapMaxKeypointOp final : public Operator<Context> {
  public:
-  HeatmapMaxKeypointOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit HeatmapMaxKeypointOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         should_output_softmax_(this->template GetSingleArgument<bool>(
             "should_output_softmax",
             false)) {}
diff --git a/caffe2/operators/if_op.h b/caffe2/operators/if_op.h
index 7facf9b..be13b46 100644
--- a/caffe2/operators/if_op.h
+++ b/caffe2/operators/if_op.h
@@ -10,7 +10,7 @@ namespace caffe2 {
 template <class Context>
 class IfOp final : public Operator<Context> {
  public:
-  IfOp(const OperatorDef& operator_def, Workspace* ws)
+  explicit IfOp(const OperatorDef& operator_def, Workspace* ws)
       : Operator<Context>(operator_def, ws) {
     CAFFE_ENFORCE(
         this->template HasSingleArgumentOfType<NetDef>("then_net"),
diff --git a/caffe2/operators/im2col_op.h b/caffe2/operators/im2col_op.h
index d63acd7..5bb07ea 100644
--- a/caffe2/operators/im2col_op.h
+++ b/caffe2/operators/im2col_op.h
@@ -12,8 +12,9 @@ template <typename T, class Context>
 class Im2ColOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  Im2ColOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit Im2ColOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         pad_(this->template GetSingleArgument<int>("pad", 0)),
         kernel_h_(this->template GetSingleArgument<int>(
             "kernel_h",
@@ -157,8 +158,9 @@ template <typename T, class Context>
 class Col2ImOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  Col2ImOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit Col2ImOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         pad_(this->template GetSingleArgument<int>("pad", 0)),
         kernel_h_(this->template GetSingleArgument<int>(
             "kernel_h",
diff --git a/caffe2/operators/index_hash_ops.h b/caffe2/operators/index_hash_ops.h
index 9c9ad5b..12ac24c 100644
--- a/caffe2/operators/index_hash_ops.h
+++ b/caffe2/operators/index_hash_ops.h
@@ -11,8 +11,9 @@ template <class Context>
 class IndexHashOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  IndexHashOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit IndexHashOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         seed_(this->template GetSingleArgument<int64_t>("seed", 0)),
         modulo_(this->template GetSingleArgument<int64_t>("modulo", 0)) {
     CAFFE_ENFORCE_GT(modulo_, 0, "MODULO should be > 0");
diff --git a/caffe2/operators/index_ops.cc b/caffe2/operators/index_ops.cc
index b4db62b..b022708 100644
--- a/caffe2/operators/index_ops.cc
+++ b/caffe2/operators/index_ops.cc
@@ -15,8 +15,9 @@ namespace caffe2 {
 template <typename T>
 class IndexCreateOp : public Operator<CPUContext> {
  public:
-  IndexCreateOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws),
+  template <class... Args>
+  explicit IndexCreateOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...),
         maxElements_(OperatorBase::GetSingleArgument<int64_t>(
             "max_elements",
             std::numeric_limits<int64_t>::max())) {}
@@ -33,8 +34,8 @@
 
 class IndexGetOp : public Operator<CPUContext> {
  public:
-  IndexGetOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws) {}
+  template <class... Args>
+  explicit IndexGetOp(Args&&... args) : Operator(std::forward<Args>(args)...) {}
 
   bool RunOnDevice() override {
     return DispatchHelper::call(this, Input(1));
@@ -57,8 +58,9 @@ class IndexGetOp : public Operator<CPUContext> {
 
 class IndexLoadOp : public Operator<CPUContext> {
  public:
-  IndexLoadOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws),
+  template <class... Args>
+  explicit IndexLoadOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...),
         skipFirstEntry_(
             OperatorBase::GetSingleArgument<int>("skip_first_entry", 0)) {}
 
@@ -87,8 +89,9 @@ class IndexLoadOp : public Operator<CPUContext> {
 
 class IndexStoreOp : public Operator<CPUContext> {
  public:
-  IndexStoreOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws) {}
+  template <class... Args>
+  explicit IndexStoreOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...) {}
 
   bool RunOnDevice() override {
     auto& base = OperatorBase::Input<std::unique_ptr<IndexBase>>(0);
@@ -106,8 +109,9 @@ class IndexStoreOp : public Operator<CPUContext> {
 
 class IndexFreezeOp : public Operator<CPUContext> {
  public:
-  IndexFreezeOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws) {}
+  template <class... Args>
+  explicit IndexFreezeOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...) {}
 
   bool RunOnDevice() override {
     auto& base = OperatorBase::Input<std::unique_ptr<IndexBase>>(0);
@@ -118,8 +122,9 @@ class IndexFreezeOp : public Operator<CPUContext> {
 
 class IndexSizeOp : public Operator<CPUContext> {
  public:
-  IndexSizeOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws) {}
+  template <class... Args>
+  explicit IndexSizeOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...) {}
 
   bool RunOnDevice() override {
     auto& base = OperatorBase::Input<std::unique_ptr<IndexBase>>(0);
diff --git a/caffe2/operators/instance_norm_op.h b/caffe2/operators/instance_norm_op.h
index 62505cc..cbd6bd4 100644
--- a/caffe2/operators/instance_norm_op.h
+++ b/caffe2/operators/instance_norm_op.h
@@ -11,8 +11,9 @@ template <typename T, class Context>
 class InstanceNormOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  InstanceNormOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit InstanceNormOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         epsilon_(this->template GetSingleArgument<float>("epsilon", 1e-5f)),
         order_(StringToStorageOrder(
             this->template GetSingleArgument<std::string>("order", "NCHW"))) {
@@ -51,8 +52,9 @@ template <typename T, class Context>
 class InstanceNormGradientOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  InstanceNormGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit InstanceNormGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         epsilon_(this->template GetSingleArgument<float>("epsilon", 1e-5f)),
         order_(StringToStorageOrder(
             this->template GetSingleArgument<std::string>("order", "NCHW"))) {
diff --git a/caffe2/operators/integral_image_op.h b/caffe2/operators/integral_image_op.h
index a299218..d62d85e 100644
--- a/caffe2/operators/integral_image_op.h
+++ b/caffe2/operators/integral_image_op.h
@@ -11,8 +11,9 @@ namespace caffe2 {
 template <typename T, class Context>
 class IntegralImageOp final : public Operator<Context> {
  public:
-  IntegralImageOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws) {}
+  template <class... Args>
+  explicit IntegralImageOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...) {}
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
   bool RunOnDevice() override;
@@ -21,8 +22,9 @@ class IntegralImageOp final : public Operator<Context> {
 template <typename T, class Context>
 class IntegralImageGradientOp final : public Operator<Context> {
  public:
-  IntegralImageGradientOp(const OperatorDef& def, Workspace* ws)
-      : Operator<Context>(def, ws) {}
+  template <class... Args>
+  explicit IntegralImageGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...) {}
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
   bool RunOnDevice() override;
diff --git a/caffe2/operators/key_split_ops.h b/caffe2/operators/key_split_ops.h
index d31ae5f..f3eb3cd 100644
--- a/caffe2/operators/key_split_ops.h
+++ b/caffe2/operators/key_split_ops.h
@@ -12,8 +12,9 @@ class KeySplitOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  KeySplitOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit KeySplitOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         categorical_limit_(
             this->template GetSingleArgument<int>("categorical_limit", 0)) {
     CAFFE_ENFORCE_GT(categorical_limit_, 0);
diff --git a/caffe2/operators/last_n_window_collector.cc b/caffe2/operators/last_n_window_collector.cc
index 2b0695f..1b141b6 100644
--- a/caffe2/operators/last_n_window_collector.cc
+++ b/caffe2/operators/last_n_window_collector.cc
@@ -11,8 +11,9 @@ template <class Context>
 class LastNWindowCollectorOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LastNWindowCollectorOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LastNWindowCollectorOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         numToCollect_(
             OperatorBase::GetSingleArgument<int>("num_to_collect", -1)) {
     CAFFE_ENFORCE_GT(numToCollect_, 0);
diff --git a/caffe2/operators/layer_norm_op.h b/caffe2/operators/layer_norm_op.h
index 2ab3691..2640f51 100644
--- a/caffe2/operators/layer_norm_op.h
+++ b/caffe2/operators/layer_norm_op.h
@@ -120,8 +120,9 @@ template <class Context>
 class LayerNormGradientOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LayerNormGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LayerNormGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         OP_SINGLE_ARG(int, "axis", axis_, 1) {}
 
   ~LayerNormGradientOp() {}
diff --git a/caffe2/operators/leaky_relu_op.h b/caffe2/operators/leaky_relu_op.h
index efda11a..98083ba 100644
--- a/caffe2/operators/leaky_relu_op.h
+++ b/caffe2/operators/leaky_relu_op.h
@@ -9,8 +9,9 @@ namespace caffe2 {
 template <typename T, class Context>
 class LeakyReluOp : public Operator<Context> {
  public:
-  LeakyReluOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws), alpha_(0.01) {
+  template <class... Args>
+  explicit LeakyReluOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...), alpha_(0.01) {
     if (HasArgument("alpha")) {
       alpha_ = static_cast<T>(
           this->template GetSingleArgument<float>("alpha", 0.01));
@@ -28,8 +29,9 @@ class LeakyReluOp : public Operator<Context> {
 template <typename T, class Context>
 class LeakyReluGradientOp final : public Operator<Context> {
  public:
-  LeakyReluGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws), alpha_(0.01) {
+  template <class... Args>
+  explicit LeakyReluGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...), alpha_(0.01) {
     if (HasArgument("alpha")) {
       alpha_ = static_cast<T>(
           this->template GetSingleArgument<float>("alpha", 0.01));
diff --git a/caffe2/operators/length_split_op.h b/caffe2/operators/length_split_op.h
index 87eba85..a5bee08 100644
--- a/caffe2/operators/length_split_op.h
+++ b/caffe2/operators/length_split_op.h
@@ -14,8 +14,9 @@ class LengthsSplitOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  LengthsSplitOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LengthsSplitOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         n_split_(OperatorBase::GetSingleArgument<int32_t>("n_split", 0)) {
     if (InputSize() == 1) {
       // If not specified, then must have this argument
diff --git a/caffe2/operators/lengths_pad_op.h b/caffe2/operators/lengths_pad_op.h
index a51e0c9..c0019b6 100644
--- a/caffe2/operators/lengths_pad_op.h
+++ b/caffe2/operators/lengths_pad_op.h
@@ -10,8 +10,9 @@ template <class Context>
 class LengthsPadOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LengthsPadOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LengthsPadOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         OP_SINGLE_ARG(double, "padding_value", padding_value_, -1),
         OP_SINGLE_ARG(int, "target_length", target_length_, -1) {
     CAFFE_ENFORCE_GE(target_length_, 1, "target_length argument must be >= 1");
diff --git a/caffe2/operators/lengths_reducer_ops.h b/caffe2/operators/lengths_reducer_ops.h
index 5c6d8f3..c5f3428 100644
--- a/caffe2/operators/lengths_reducer_ops.h
+++ b/caffe2/operators/lengths_reducer_ops.h
@@ -18,8 +18,9 @@ template <
 class CPUSparseLengthsReductionOp : public Operator<CPUContext> {
  public:
   USE_OPERATOR_FUNCTIONS(CPUContext);
-  CPUSparseLengthsReductionOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws) {
+  template <class... Args>
+  explicit CPUSparseLengthsReductionOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...) {
     static_assert(
         !(USE_WEIGHT & USE_MEAN), "Cannot both specify weight and mean.");
   }
diff --git a/caffe2/operators/lengths_top_k_op.h b/caffe2/operators/lengths_top_k_op.h
index 9f5d1cb..b6a1a88 100644
--- a/caffe2/operators/lengths_top_k_op.h
+++ b/caffe2/operators/lengths_top_k_op.h
@@ -14,8 +14,10 @@ class LengthsTopKOp : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
 
-  LengthsTopKOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws), OP_SINGLE_ARG(int, "k", k_, -1) {
+  template <class... Args>
+  explicit LengthsTopKOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
+        OP_SINGLE_ARG(int, "k", k_, -1) {
     CAFFE_ENFORCE_GE(k_, 1, "k argument must be >= 1");
   }
 
@@ -30,8 +32,10 @@ class LengthsTopKOp : public Operator<Context> {
 template <typename T, class Context>
 class LengthsTopKGradientOp : public Operator<Context> {
  public:
-  LengthsTopKGradientOp(const OperatorDef& def, Workspace* ws)
-      : Operator<Context>(def, ws), OP_SINGLE_ARG(int, "k", k_, -1) {
+  template <class... Args>
+  explicit LengthsTopKGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
+        OP_SINGLE_ARG(int, "k", k_, -1) {
     CAFFE_ENFORCE_GE(k_, 1, "k argument must be >= 1");
   }
   USE_OPERATOR_CONTEXT_FUNCTIONS;
diff --git a/caffe2/operators/listwise_l2r_op.h b/caffe2/operators/listwise_l2r_op.h
index f750e43..425f7e4 100644
--- a/caffe2/operators/listwise_l2r_op.h
+++ b/caffe2/operators/listwise_l2r_op.h
@@ -12,8 +12,9 @@ namespace caffe2 {
 template <typename T, class Context>
 class LambdaRankNdcgOp final : public Operator<Context> {
  public:
-  LambdaRankNdcgOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LambdaRankNdcgOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         use_ndcg_as_loss_(
             this->template GetSingleArgument<bool>("use_ndcg_as_loss", false)),
         use_exp_gain_(
diff --git a/caffe2/operators/load_save_op.h b/caffe2/operators/load_save_op.h
index d1c2a87..aae6a1d 100644
--- a/caffe2/operators/load_save_op.h
+++ b/caffe2/operators/load_save_op.h
@@ -40,7 +40,7 @@ template <class Context>
 class DBExistsOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  DBExistsOp(const OperatorDef& operator_def, Workspace* ws)
+  explicit DBExistsOp(const OperatorDef& operator_def, Workspace* ws)
       : Operator<Context>(operator_def, ws),
         ws_(ws),
         absolute_path_(
@@ -70,7+70,7 @@ template <class Context>
 class LoadOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LoadOp(const OperatorDef& operator_def, Workspace* ws)
+  explicit LoadOp(const OperatorDef& operator_def, Workspace* ws)
       : Operator<Context>(operator_def, ws),
         ws_(ws),
         absolute_path_(
@@ -408,7 +408,7 @@ template <class Context>
 class SaveOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  SaveOp(const OperatorDef& operator_def, Workspace* ws)
+  explicit SaveOp(const OperatorDef& operator_def, Workspace* ws)
       : Operator<Context>(operator_def, ws),
         ws_(ws),
         absolute_path_(
@@ -521,7 +521,7 @@ string FormatString(const string& pattern, Ts... values) {
 template <class Context>
 class CheckpointOp final : public Operator<Context> {
  public:
-  CheckpointOp(const OperatorDef& operator_def, Workspace* ws)
+  explicit CheckpointOp(const OperatorDef& operator_def, Workspace* ws)
       : Operator<Context>(operator_def, ws),
         db_pattern_(this->template GetSingleArgument<std::string>("db", "")),
         every_(this->template GetSingleArgument<int>("every", 1)),
diff --git a/caffe2/operators/local_response_normalization_op.h b/caffe2/operators/local_response_normalization_op.h
index aa07426..b0b02a7 100644
--- a/caffe2/operators/local_response_normalization_op.h
+++ b/caffe2/operators/local_response_normalization_op.h
@@ -12,8 +12,9 @@ template <typename T, class Context>
 class LRNOpBase : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LRNOpBase(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LRNOpBase(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         size_(this->template GetSingleArgument<int>("size", 0)),
         alpha_(this->template GetSingleArgument<float>("alpha", 0)),
         beta_(this->template GetSingleArgument<float>("beta", 0)),
@@ -57,8 +58,9 @@ template <typename T, class Context>
 class LRNOp final : public LRNOpBase<T, Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LRNOp(const OperatorDef& operator_def, Workspace* ws)
-      : LRNOpBase<T, Context>(operator_def, ws) {}
+  template <class... Args>
+  explicit LRNOp(Args&&... args)
+      : LRNOpBase<T, Context>(std::forward<Args>(args)...) {}
 
   bool RunOnDeviceWithOrderNCHW() override;
   bool RunOnDeviceWithOrderNHWC() override;
@@ -74,8 +76,9 @@ template <typename T, class Context>
 class LRNGradientOp final : public LRNOpBase<T, Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LRNGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : LRNOpBase<T, Context>(operator_def, ws) {}
+  template <class... Args>
+  explicit LRNGradientOp(Args&&... args)
+      : LRNOpBase<T, Context>(std::forward<Args>(args)...) {}
 
   bool RunOnDeviceWithOrderNCHW() override;
   bool RunOnDeviceWithOrderNHWC() override;
diff --git a/caffe2/operators/local_response_normalization_op_cudnn.cc b/caffe2/operators/local_response_normalization_op_cudnn.cc
index 02937a7..5795ac6 100644
--- a/caffe2/operators/local_response_normalization_op_cudnn.cc
+++ b/caffe2/operators/local_response_normalization_op_cudnn.cc
@@ -9,8 +9,9 @@ class CuDNNLRNOp final : public Operator<CUDAContext> {
  public:
   USE_OPERATOR_FUNCTIONS(CUDAContext);
 
-  CuDNNLRNOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws),
+  template <class... Args>
+  explicit CuDNNLRNOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...),
         cudnn_wrapper_(&context_),
         size_(OperatorBase::GetSingleArgument<int>("size", 0)),
         alpha_(OperatorBase::GetSingleArgument<float>("alpha", 0)),
@@ -51,8 +52,9 @@ class CuDNNLRNOp final : public Operator<CUDAContext> {
 class CuDNNLRNGradientOp final : public Operator<CUDAContext> {
  public:
   USE_OPERATOR_FUNCTIONS(CUDAContext);
-  CuDNNLRNGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator(operator_def, ws),
+  template <class... Args>
+  explicit CuDNNLRNGradientOp(Args&&... args)
+      : Operator(std::forward<Args>(args)...),
         cudnn_wrapper_(&context_),
         size_(OperatorBase::GetSingleArgument<int>("size", 0)),
         alpha_(OperatorBase::GetSingleArgument<float>("alpha", 0)),
diff --git a/caffe2/operators/locally_connected_op.h b/caffe2/operators/locally_connected_op.h
index cf5bf63..88a5630 100644
--- a/caffe2/operators/locally_connected_op.h
+++ b/caffe2/operators/locally_connected_op.h
@@ -16,8 +16,9 @@ class LocallyConnectedOp final : public ConvPoolOpBase<Context> {
  public:
   USE_CONV_POOL_BASE_FUNCTIONS(Context);
 
-  LocallyConnectedOp(const OperatorDef& operator_def, Workspace* ws)
-      : ConvPoolOpBase<Context>(operator_def, ws) {
+  template <class... Args>
+  explicit LocallyConnectedOp(Args&&... args)
+      : ConvPoolOpBase<Context>(std::forward<Args>(args)...) {
     // Since this is the default locally connected implementation, we will
     // use CAFFE_ENFORCE instead of OPERATOR_NEEDS_FEATURE.
     CAFFE_ENFORCE(
@@ -68,8 +69,9 @@ class LocallyConnectedGradientOp final : public ConvPoolOpBase<Context> {
  public:
   USE_CONV_POOL_BASE_FUNCTIONS(Context);
 
-  LocallyConnectedGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : ConvPoolOpBase<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LocallyConnectedGradientOp(Args&&... args)
+      : ConvPoolOpBase<Context>(std::forward<Args>(args)...),
         OP_SINGLE_ARG(bool, "no_bias", no_bias_, false) {
     CAFFE_ENFORCE(
         !(no_bias_ && OutputSize() == 3),
diff --git a/caffe2/operators/logit_op.h b/caffe2/operators/logit_op.h
index 0f0cd1b..f2cd139 100644
--- a/caffe2/operators/logit_op.h
+++ b/caffe2/operators/logit_op.h
@@ -25,8 +25,9 @@ template <typename T, class Context>
 class LogitGradientOp final : public Operator<Context> {
  public:
   USE_OPERATOR_CONTEXT_FUNCTIONS;
-  LogitGradientOp(const OperatorDef& operator_def, Workspace* ws)
-      : Operator<Context>(operator_def, ws),
+  template <class... Args>
+  explicit LogitGradientOp(Args&&... args)
+      : Operator<Context>(std::forward<Args>(args)...),
         eps_(this->template GetSingleArgument<float>("eps", 1e-6f)) {}
 
   ~LogitGradientOp() {}
-- 
2.7.4
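
Note on the constructor pattern: every hunk above applies the same mechanical change. The
hand-written (const OperatorDef&, Workspace*) constructor of each operator is replaced by an
explicit, perfect-forwarding variadic template constructor that hands its arguments straight to
the base class, so the derived operator no longer hard-codes which constructor signatures the
base class offers. The sketch below illustrates that pattern in isolation; OperatorDef,
Workspace, OperatorBase and the ExampleOp classes are minimal stand-ins for this sketch, not the
real caffe2 declarations.

    // Minimal sketch of the constructor refactor shown in this patch.
    #include <utility>

    struct OperatorDef {};
    struct Workspace {};

    // Stand-in for caffe2's operator base class; only the constructor shape matters here.
    struct OperatorBase {
      OperatorBase(const OperatorDef& /*def*/, Workspace* /*ws*/) {}
    };

    // Before the refactor: the derived operator is tied to one base constructor signature.
    struct ExampleOpBefore : OperatorBase {
      ExampleOpBefore(const OperatorDef& def, Workspace* ws)
          : OperatorBase(def, ws) {}
    };

    // After the refactor: whatever argument list the base class accepts is forwarded
    // unchanged, so adding a new base constructor needs no edits in the derived class.
    struct ExampleOpAfter : OperatorBase {
      template <class... Args>
      explicit ExampleOpAfter(Args&&... args)
          : OperatorBase(std::forward<Args>(args)...) {}
    };

    int main() {
      OperatorDef def;
      Workspace ws;
      ExampleOpBefore before(def, &ws);
      ExampleOpAfter after(def, &ws);  // existing call sites keep working
      (void)before;
      (void)after;
      return 0;
    }

Marking the forwarding constructor explicit keeps a single forwarded argument from silently
acting as an implicit converting constructor, which is why the codemod also adds explicit to
the constructors it otherwise leaves unchanged (IfOp, DBExistsOp, LoadOp, SaveOp, CheckpointOp).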