      gtl::ArraySlice<Gradient*> output_gradients,
      std::vector<Gradient*>* result);

+   bool IsPersistent() const { return persistent_; }
+
 private:
  TensorTape tensor_tape_;
  OpTape<BackwardFunction> op_tape_;
    g, = backprop.gradients_function(loss, [0])(logits, labels)
    self.assertAllEqual(g.numpy(), [[-0.5, 0.5]])

+   def testGradientWithinTapeBlock(self):
+     v1 = resource_variable_ops.ResourceVariable(1.)
+     with backprop.GradientTape() as t:
+       loss = 2 * v1
+       with self.assertRaises(RuntimeError):
+         t.gradient(loss, [v1])
+     with backprop.GradientTape(persistent=True) as t:
+       loss = 2 * v1
+       grad = t.gradient(loss, [v1])
+     self.assertAllEqual(grad[0], 2.0)
+
  @test_util.assert_no_new_tensors
  def testSecondGrad(self):
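
The new test pins down the user-facing contract: gradient() called while a non-persistent tape is still recording raises RuntimeError, while a tape created with persistent=True allows it. Below is a minimal standalone sketch of that contract, assuming eager execution is enabled and that the same internal backprop and resource_variable_ops modules used by the test are importable; the expected printed value is an assumption based on the gradient of 2 * v.

from tensorflow.python.eager import backprop
from tensorflow.python.ops import resource_variable_ops

v = resource_variable_ops.ResourceVariable(1.)

# Non-persistent tape: gradient() inside the block is rejected.
with backprop.GradientTape() as tape:
  loss = 2 * v
  try:
    tape.gradient(loss, [v])
  except RuntimeError as err:
    print("non-persistent tape:", err)

# Persistent tape: gradient() may run while the tape is still active.
with backprop.GradientTape(persistent=True) as tape:
  loss = 2 * v
  grad, = tape.gradient(loss, [v])
  print("persistent tape:", grad.numpy())  # expected: 2.0
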
PyObject* TFE_Py_TapeGradient(PyObject* tape, PyObject* vspace,
                              PyObject* target, PyObject* sources,
                              PyObject* output_gradients, TF_Status* status) {
+   TFE_Py_Tape* tape_obj = reinterpret_cast<TFE_Py_Tape*>(tape);
+   if (!tape_obj->tape->IsPersistent()) {
+     auto* tape_set = GetTapeSet();
+     if (tape_set->find(tape_obj) != tape_set->end()) {
+       PyErr_SetString(PyExc_RuntimeError,
+                       "Trying to call tape.gradient on a non-persistent tape "
+                       "while it is still active.");
+       return nullptr;
+     }
+   }
  PyVSpace c_vspace(vspace);
  if (!c_vspace.Initialize().ok()) {
    return nullptr;
      Py_INCREF(tensor);
    }
  }
-   TFE_Py_Tape* tape_obj = reinterpret_cast<TFE_Py_Tape*>(tape);
  std::vector<PyObject*> result;
  status->status = tape_obj->tape->ComputeGradient(
      c_vspace, target_vec, sources_vec, outgrad_vec, &result);
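
The check added above treats membership in GetTapeSet() as "still active": presumably a tape is inserted into that set when its Python block is entered and removed when the block exits, so a non-persistent tape that is still in the set must reject gradient(). The following is a toy Python model of that bookkeeping only; the names (_ACTIVE_TAPES, ToyTape) are hypothetical and merely mirror the logic of the C++ check, not TensorFlow's internals.

# Toy model of the active-tape check performed in TFE_Py_TapeGradient.
_ACTIVE_TAPES = set()  # stands in for the contents of the C++ GetTapeSet()


class ToyTape(object):

  def __init__(self, persistent=False):
    self._persistent = persistent  # mirrors the C++ persistent_ member

  def __enter__(self):
    _ACTIVE_TAPES.add(self)        # "push": tape starts recording
    return self

  def __exit__(self, *exc):
    _ACTIVE_TAPES.discard(self)    # "pop": tape stops recording

  def gradient(self):
    # Mirrors the new check: a non-persistent tape that is still in the
    # active set refuses to compute gradients.
    if not self._persistent and self in _ACTIVE_TAPES:
      raise RuntimeError(
          "Trying to call tape.gradient on a non-persistent tape "
          "while it is still active.")
    return "gradients..."


with ToyTape() as t:
  try:
    t.gradient()
  except RuntimeError as e:
    print(e)             # rejected: non-persistent and still active

with ToyTape(persistent=True) as t:
  print(t.gradient())    # allowed: persistent tapes may compute mid-block
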