     return [(grad - _sum(grad * orig, orig.attrs.axis, True)) * orig]
 
 
+@register_gradient("nn.log_softmax")
+def log_softmax_grad(orig, grad):
+    """Gradient of log_softmax"""
+    # log_softmax(x) = log(softmax(x)), so the incoming gradient w.r.t.
+    # softmax(x) is grad / softmax(x); from there, reuse softmax_grad.
+    x = orig.args[0]
+    sm = _nn.softmax(x, axis=orig.attrs.axis)
+    grad = grad / sm
+    return softmax_grad(sm, grad)
+
+
 @register_gradient("nn.bias_add")
 def bias_add_grad(orig, grad):
     """Returns gradient of bias_add"""
 # specific language governing permissions and limitations
 # under the License.
 import numpy as np
+import pytest
 import tvm
 from tvm import relay
 def test_softmax_grad():
     data = relay.var("data", relay.TensorType((1, 16), "float64"))
     fwd_func = relay.Function([data], relay.nn.softmax(data))
-    check_grad(fwd_func)
+    check_grad(fwd_func, scale=1)
+
+
+def test_log_softmax_grad():
+    data = relay.var("data", relay.TensorType((2, 16), "float64"))
+    fwd_func = relay.Function([data], relay.nn.log_softmax(data))
+    check_grad(fwd_func, scale=1)
 
 
 def test_bias_add_grad():
 if __name__ == "__main__":
-    test_unary_op()
-    test_binary_op()
-    test_bias_add_grad()
+    pytest.main([__file__])
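For context, check_grad (from tvm.relay.testing) compares the gradient produced by Relay's AD against central finite differences on random inputs; passing scale=1 is there, I believe, to draw O(1) float64 inputs rather than the much smaller default, which keeps the finite-difference estimate well-conditioned for softmax-family ops. A rough standalone analogue of that check for log_softmax (my helper names, not TVM's API):

import numpy as np

def log_softmax(x):
    x = x - x.max(axis=-1, keepdims=True)  # subtract max for numerical stability
    return x - np.log(np.exp(x).sum(axis=-1, keepdims=True))

def analytic_grad(x, g):
    # grad - softmax(x) * sum(grad), matching log_softmax_grad above
    return g - np.exp(log_softmax(x)) * g.sum(axis=-1, keepdims=True)

def numeric_grad(x, g, eps=1e-6):
    # central finite differences of the scalar loss sum(g * log_softmax(x))
    out = np.zeros_like(x)
    for idx in np.ndindex(x.shape):
        xp, xm = x.copy(), x.copy()
        xp[idx] += eps
        xm[idx] -= eps
        out[idx] = ((g * log_softmax(xp)).sum() - (g * log_softmax(xm)).sum()) / (2 * eps)
    return out

x = np.random.uniform(-1, 1, size=(2, 16))
g = np.random.randn(2, 16)
assert np.allclose(analytic_grad(x, g), numeric_grad(x, g), atol=1e-5)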