self._added_graphs = []
self._added_meta_graphs = []
self._added_session_logs = []
+ self._added_run_metadata = {}
@property
def summaries(self):
# pylint: disable=unused-argument
self._added_session_logs.append(session_log)
def add_run_metadata(self, run_metadata, tag, global_step=None):
  """Record `run_metadata` in memory under `tag`, mimicking FileWriter.

  Args:
    run_metadata: The RunMetadata value to record (stored as-is).
    tag: Key under which the metadata is stored in `_added_run_metadata`.
    global_step: Optional step number; must be non-negative when provided.

  Raises:
    ValueError: If `global_step` is given and negative.
  """
  step_is_invalid = global_step is not None and global_step < 0
  if step_is_invalid:
    raise ValueError('Invalid global_step %s.' % global_step)
  self._added_run_metadata[tag] = run_metadata
def flush(self):
  """No-op: the fake writer keeps records in memory, nothing to flush."""
  return None
showing the sizes and lifetimes of tensors.
"""
self._output_file = os.path.join(output_dir, "timeline-{}.json")
+ self._file_writer = SummaryWriterCache.get(output_dir)
self._show_dataflow = show_dataflow
self._show_memory = show_memory
self._timer = SecondOrStepTimer(
self._save(global_step,
self._output_file.format(global_step),
run_values.run_metadata.step_stats)
+ self._file_writer.add_run_metadata(run_values.run_metadata,
+ "step_%d" % global_step)
self._next_step = global_step + 1
sess.run(self.train_op) # Saved.
self.assertEqual(3, self._count_timeline_files())
def test_run_metadata_saves_in_first_step(self):
  """ProfilerHook forwards the first step's RunMetadata to the FileWriter.

  Installs the fake summary writer so the hook's `add_run_metadata` call
  can be observed in memory, then runs one step and checks that exactly
  one entry tagged 'step_1' was recorded.
  """
  # Start from an empty cache so the hook and this test share one writer.
  writer_cache.FileWriterCache.clear()
  fake_summary_writer.FakeSummaryWriter.install()
  recording_writer = writer_cache.FileWriterCache.get(self.output_dir)
  with self.graph.as_default():
    profiler_hook = basic_session_run_hooks.ProfilerHook(
        save_secs=2, output_dir=self.output_dir)
    with monitored_session.SingularMonitoredSession(
        hooks=[profiler_hook]) as session:
      session.run(self.train_op)  # First step is always saved.
      self.assertEqual(
          list(recording_writer._added_run_metadata.keys()), ['step_1'])
  fake_summary_writer.FakeSummaryWriter.uninstall()
# Module entry point: run every test case defined in this file.
if __name__ == '__main__':
  test.main()