--- /dev/null
+ir_version: 3
+producer_name: "pytorch"
+producer_version: "0.4"
+graph {
+  node {
+    input: "0"
+    output: "1"
+    op_type: "Sigmoid"
+  }
+  node {
+    input: "1"
+    output: "2"
+    op_type: "Log"
+  }
+  name: "torch-jit-export"
+  input {
+    name: "0"
+    type {
+      tensor_type {
+        elem_type: FLOAT
+        shape {
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 2
+          }
+          dim {
+            dim_value: 3
+          }
+          dim {
+            dim_value: 4
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "2"
+    type {
+      tensor_type {
+        elem_type: FLOAT
+        shape {
+          dim {
+            dim_value: 1
+          }
+          dim {
+            dim_value: 2
+          }
+          dim {
+            dim_value: 3
+          }
+          dim {
+            dim_value: 4
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 9
+}
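
The expect file above pins the exported graph to a two-node `Sigmoid` -> `Log` chain. As a quick sanity check of that decomposition (illustrative only, not part of the patch): `LogSigmoid(x)` is elementwise `log(sigmoid(x))`. Note that the native kernel uses a numerically stabler formulation, so the exported graph can underflow for large negative inputs where the native op does not.

```python
import torch

# Check the identity the expect file encodes:
# LogSigmoid(x) == Log(Sigmoid(x)), elementwise.
x = torch.randn(1, 2, 3, 4)
assert torch.allclose(torch.nn.LogSigmoid()(x), torch.sigmoid(x).log())
```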
         x = torch.randn(1, 2, 3, 4)
         self.assertONNX(torch.nn.RReLU(), x)

+    def test_log_sigmoid(self):
+        x = torch.randn(1, 2, 3, 4)
+        self.assertONNX(torch.nn.LogSigmoid(), x)
+
     def test_linear(self):
         x = torch.randn(3, 4)
         self.assertONNX(torch.nn.Linear(4, 5, bias=True), x)
 def rrelu(g, input, lower, upper, training, generator):
     p = g.op('RandomUniformLike', input, high_f=upper, low_f=lower)
     return g.op('PRelu', input, p)
+
+
+@parse_args('v')
+def log_sigmoid(g, input):
+    p = g.op('Sigmoid', input)
+    return g.op('Log', p)
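
With the symbolic in place, a minimal export call that exercises it (a sketch, assuming an environment with this patch applied; `verbose=True` just prints the resulting graph):

```python
import io
import torch

# Exporting LogSigmoid should now emit the Sigmoid -> Log pair
# produced by log_sigmoid() above instead of raising a
# missing-operator error.
f = io.BytesIO()
torch.onnx.export(torch.nn.LogSigmoid(), torch.randn(1, 2, 3, 4), f, verbose=True)
```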