• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#     http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ==============================================================================
15
16"""Operations to emit summaries."""
17
18from __future__ import absolute_import
19from __future__ import division
20from __future__ import print_function
21
22import abc
23import collections
24import functools
25import getpass
26import os
27import re
28import threading
29import time
30
31import six
32
33from tensorflow.core.framework import graph_pb2
34from tensorflow.core.framework import summary_pb2
35from tensorflow.core.protobuf import config_pb2
36from tensorflow.python.eager import context
37from tensorflow.python.eager import profiler as _profiler
38from tensorflow.python.framework import constant_op
39from tensorflow.python.framework import dtypes
40from tensorflow.python.framework import ops
41from tensorflow.python.framework import smart_cond
42from tensorflow.python.framework import tensor_util
43from tensorflow.python.ops import array_ops
44from tensorflow.python.ops import control_flow_ops
45from tensorflow.python.ops import gen_summary_ops
46from tensorflow.python.ops import math_ops
47from tensorflow.python.ops import resource_variable_ops
48from tensorflow.python.ops import summary_op_util
49from tensorflow.python.platform import tf_logging as logging
50from tensorflow.python.training import training_util
51from tensorflow.python.util import deprecation
52from tensorflow.python.util import tf_contextlib
53from tensorflow.python.util.tf_export import tf_export
54
# A global dictionary mapping graph keys to a list of summary writer init ops.
_SUMMARY_WRITER_INIT_OP = {}

# Validation patterns for create_db_writer(): experiment and run names may not
# contain ASCII control characters or angle brackets, and are length-limited.
_EXPERIMENT_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,256}$")
_RUN_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,512}$")
# User names must be valid as both a DNS label and a Linux username.
_USER_NAME_PATTERNS = re.compile(r"^[a-z]([-a-z0-9]{0,29}[a-z0-9])?$", re.I)
61
62
def _should_record_summaries_internal(default_state):
  """Returns a boolean Tensor deciding whether summaries should be recorded.

  Recording happens only when two conditions both hold: ctx.summary_recording
  (usually set by the user) and ctx.summary_recording_distribution_strategy
  (controlled by DistributionStrategy / tf.distribute.ReplicaContext).

  Args:
    default_state: True or False; used for ctx.summary_recording when the user
      has not set it. ctx.summary_recording_distribution_strategy defaults to
      True.
  """
  ctx = context.context()

  def _resolve(value):
    # Either condition may be a callable evaluated on demand.
    return value() if callable(value) else value

  strategy_cond = _resolve(ctx.summary_recording_distribution_strategy)
  user_cond = _resolve(ctx.summary_recording)
  if user_cond is None:
    user_cond = default_state
  return math_ops.logical_and(strategy_cond, user_cond)
83
84
def _should_record_summaries_v2():
  """Returns boolean Tensor which is true if summaries should be recorded.

  If no recording status has been set, this defaults to True, unlike the public
  should_record_summaries(), which defaults to False.
  """
  return _should_record_summaries_internal(default_state=True)
92
93
def should_record_summaries():
  """Returns boolean Tensor which is true if summaries should be recorded.

  Defaults to False when no recording condition has been set via `record_if`.
  """
  return _should_record_summaries_internal(default_state=False)
97
98
@tf_export("summary.record_if", v1=[])
@tf_contextlib.contextmanager
def record_if(condition):
  """Sets summary recording on or off per the provided boolean value.

  The provided value can be a python boolean, a scalar boolean Tensor, or a
  callable providing such a value; if a callable is passed it will be invoked
  on-demand to determine whether summary writing will occur.

  Args:
    condition: can be True, False, a bool Tensor, or a callable providing such.

  Yields:
    Returns a context manager that sets this value on enter and restores the
    previous value on exit.
  """
  ctx = context.context()
  previous = ctx.summary_recording
  try:
    ctx.summary_recording = condition
    yield
  finally:
    # Restore whatever condition was in effect before entering.
    ctx.summary_recording = previous
121
122
# TODO(apassos) consider how to handle local step here.
def record_summaries_every_n_global_steps(n, global_step=None):
  """Sets the should_record_summaries Tensor to true if global_step % n == 0."""
  if global_step is None:
    global_step = training_util.get_or_create_global_step()
  with ops.device("cpu:0"):
    on_nth_step = lambda: math_ops.equal(global_step % n, 0)
    # Eager mode defers evaluation via the callable; graph mode needs the
    # condition tensor built eagerly here, under the cpu:0 device scope.
    condition = on_nth_step if context.executing_eagerly() else on_nth_step()
  return record_if(condition)
133
134
def always_record_summaries():
  """Sets the should_record_summaries Tensor to always true.

  Returns:
    A context manager (see `record_if`) enabling all summary recording.
  """
  return record_if(True)
138
139
def never_record_summaries():
  """Sets the should_record_summaries Tensor to always false.

  Returns:
    A context manager (see `record_if`) disabling all summary recording.
  """
  return record_if(False)
143
144
@tf_export("summary.experimental.get_step", v1=[])
def get_step():
  """Returns the default summary step for the current thread.

  Returns:
    The step set by `tf.summary.experimental.set_step()` if one has been set,
    otherwise None.
  """
  return context.context().summary_step
154
155
@tf_export("summary.experimental.set_step", v1=[])
def set_step(step):
  """Sets the default summary step for the current thread.

  For convenience, this function sets a default value for the `step` parameter
  used in summary-writing functions elsewhere in the API so that it need not
  be explicitly passed in every such invocation. The value can be a constant
  or a variable, and can be retrieved via `tf.summary.experimental.get_step()`.

  Note: when using this with @tf.functions, the step value will be captured at
  the time the function is traced, so changes to the step outside the function
  will not be reflected inside the function unless using a `tf.Variable` step.

  Args:
    step: An `int64`-castable default step value, or None to unset.
  """
  context.context().summary_step = step
173
174
@tf_export("summary.SummaryWriter", v1=[])
@six.add_metaclass(abc.ABCMeta)
class SummaryWriter(object):
  """Interface representing a stateful summary writer object.

  Subclasses must implement `set_as_default` and `as_default`; `init`,
  `flush`, and `close` are not abstract but raise NotImplementedError by
  default and should be overridden by concrete writers.
  """

  @abc.abstractmethod
  def set_as_default(self):
    """Enables this summary writer for the current thread."""
    raise NotImplementedError()

  @abc.abstractmethod
  @tf_contextlib.contextmanager
  def as_default(self):
    """Returns a context manager that enables summary writing."""
    raise NotImplementedError()

  def init(self):
    """Initializes the summary writer."""
    raise NotImplementedError()

  def flush(self):
    """Flushes any buffered data."""
    raise NotImplementedError()

  def close(self):
    """Flushes and closes the summary writer."""
    raise NotImplementedError()
202
203
class ResourceSummaryWriter(SummaryWriter):
  """Implementation of SummaryWriter using a SummaryWriterInterface resource."""

  def  __init__(self, shared_name, init_op_fn, name=None, v2=False):
    """Creates the writer resource and its init op.

    Args:
      shared_name: shared name for the underlying writer resource; writers
        with the same shared name share one resource.
      init_op_fn: callable taking the resource handle and returning the op
        that initializes the writer (e.g. a create_summary_file_writer op).
      name: optional op name for the resource-creating op.
      v2: if True, enforce TF2 semantics: eager-mode use after `close()`
        raises, and `init()` may not re-initialize.
    """
    self._resource = gen_summary_ops.summary_writer(
        shared_name=shared_name, name=name)
    # TODO(nickfelt): cache other constructed ops in graph mode
    self._init_op_fn = init_op_fn
    self._init_op = init_op_fn(self._resource)
    self._v2 = v2
    # Only meaningful for v2 writers in eager mode; see close().
    self._closed = False
    if context.executing_eagerly():
      # In eager mode, tie the resource's lifetime to this Python object.
      self._resource_deleter = resource_variable_ops.EagerResourceDeleter(
          handle=self._resource, handle_device="cpu:0")
    else:
      # In graph mode, register the init op so summary_writer_initializer_op()
      # can return it for this graph.
      global _SUMMARY_WRITER_INIT_OP
      key = ops.get_default_graph()._graph_key  # pylint: disable=protected-access
      _SUMMARY_WRITER_INIT_OP.setdefault(key, []).append(self._init_op)

  def set_as_default(self):
    """Enables this summary writer for the current thread."""
    if self._v2 and context.executing_eagerly() and self._closed:
      raise RuntimeError("SummaryWriter is already closed")
    context.context().summary_writer = self

  @tf_contextlib.contextmanager
  def as_default(self):
    """Returns a context manager that enables summary writing."""
    if self._v2 and context.executing_eagerly() and self._closed:
      raise RuntimeError("SummaryWriter is already closed")
    old = context.context().summary_writer
    try:
      context.context().summary_writer = self
      yield self
      # Flushes the summary writer in eager mode or in graph functions, but
      # not in legacy graph mode (you're on your own there).
      self.flush()
    finally:
      context.context().summary_writer = old

  def init(self):
    """Initializes the summary writer."""
    if self._v2:
      if context.executing_eagerly() and self._closed:
        raise RuntimeError("SummaryWriter is already closed")
      return self._init_op
    # Legacy behavior allows re-initializing the resource.
    return self._init_op_fn(self._resource)

  def flush(self):
    """Flushes any buffered data."""
    if self._v2 and context.executing_eagerly() and self._closed:
      # Closed v2 writers silently ignore flushes rather than raising.
      return
    return _flush_fn(writer=self)

  def close(self):
    """Flushes and closes the summary writer."""
    if self._v2 and context.executing_eagerly() and self._closed:
      # Idempotent: closing twice is a no-op.
      return
    try:
      with ops.control_dependencies([self.flush()]):
        with ops.device("cpu:0"):
          return gen_summary_ops.close_summary_writer(self._resource)
    finally:
      # Mark closed even if the close op raised, so later v2 eager calls fail
      # fast instead of touching a dead resource.
      if self._v2 and context.executing_eagerly():
        self._closed = True
270
271
class NoopSummaryWriter(SummaryWriter):
  """A summary writer that does nothing, for create_noop_writer()."""

  def set_as_default(self):
    """No-op: installs nothing as the default writer."""
    pass

  @tf_contextlib.contextmanager
  def as_default(self):
    """Returns a no-op context manager.

    Yields:
      The writer itself, for consistency with
      `ResourceSummaryWriter.as_default()`, so that
      `with writer.as_default() as w:` binds a writer object regardless of
      the concrete writer type. (Previously this yielded None.)
    """
    yield self

  def init(self):
    """No-op."""
    pass

  def flush(self):
    """No-op."""
    pass

  def close(self):
    """No-op."""
    pass
290
291
@tf_export(v1=["summary.initialize"])
def initialize(
    graph=None,  # pylint: disable=redefined-outer-name
    session=None):
  """Initializes summary writing for graph execution mode.

  This operation is a no-op when executing eagerly.

  This helper method provides a higher-level alternative to using
  `tf.contrib.summary.summary_writer_initializer_op` and
  `tf.contrib.summary.graph`.

  Most users will also want to call `tf.compat.v1.train.create_global_step`
  which can happen before or after this function is called.

  Args:
    graph: A `tf.Graph` or `tf.GraphDef` to output to the writer.
      This function will not write the default graph by default. When
      writing to an event log file, the associated step will be zero.
    session: So this method can call `tf.Session.run`. This defaults
      to `tf.get_default_session`.

  Raises:
    RuntimeError: If  the current thread has no default
      `tf.contrib.summary.SummaryWriter`.
    ValueError: If session wasn't passed and no default session.
  """
  if context.executing_eagerly():
    return
  if context.context().summary_writer is None:
    raise RuntimeError("No default tf.contrib.summary.SummaryWriter found")
  if session is None:
    session = ops.get_default_session()
    if session is None:
      raise ValueError("session must be passed if no default session exists")
  # Run all pending writer init ops for the current graph.
  session.run(summary_writer_initializer_op())
  if graph is None:
    return
  # Feed the serialized graph through a placeholder so the (potentially large)
  # proto is not embedded as a constant in the graph itself.
  serialized = _serialize_graph(graph)
  placeholder = array_ops.placeholder(dtypes.string)
  session.run(_graph(placeholder, 0), feed_dict={placeholder: serialized})
332
333
@tf_export("summary.create_file_writer", v1=[])
def create_file_writer_v2(logdir,
                          max_queue=None,
                          flush_millis=None,
                          filename_suffix=None,
                          name=None):
  """Creates a summary file writer for the given log directory.

  Args:
    logdir: a string specifying the directory in which to write an event file.
    max_queue: the largest number of summaries to keep in a queue; will
     flush once the queue gets bigger than this. Defaults to 10.
    flush_millis: the largest interval between flushes. Defaults to 120,000.
    filename_suffix: optional suffix for the event file name. Defaults to `.v2`.
    name: a name for the op that creates the writer.

  Returns:
    A SummaryWriter object.

  Raises:
    ValueError: if logdir is None, or (eagerly, inside a tf.function) if any
      argument is a Tensor rather than a constant value.
  """
  if logdir is None:
    raise ValueError("logdir cannot be None")
  # Captured before init_scope() so we know whether the *call site* is inside
  # a tf.function even after the scope lifts us out of it.
  inside_function = ops.inside_function()
  with ops.name_scope(name, "create_file_writer") as scope, ops.device("cpu:0"):
    # Run init inside an init_scope() to hoist it out of tf.functions.
    with ops.init_scope():
      if context.executing_eagerly():
        _check_create_file_writer_args(
            inside_function,
            logdir=logdir,
            max_queue=max_queue,
            flush_millis=flush_millis,
            filename_suffix=filename_suffix)
      logdir = ops.convert_to_tensor(logdir, dtype=dtypes.string)
      if max_queue is None:
        max_queue = constant_op.constant(10)
      if flush_millis is None:
        flush_millis = constant_op.constant(2 * 60 * 1000)
      if filename_suffix is None:
        filename_suffix = constant_op.constant(".v2")
      # Prepend the PID and a process-local UID to the filename suffix to avoid
      # filename collisions within the machine (the filename already contains
      # the hostname to avoid cross-machine collisions).
      unique_prefix = constant_op.constant(".%s.%s" % (os.getpid(), ops.uid()))
      filename_suffix = unique_prefix + filename_suffix
      # Use a unique shared_name to prevent resource sharing.
      if context.executing_eagerly():
        shared_name = context.shared_name()
      else:
        shared_name = ops._name_from_scope_name(scope)  # pylint: disable=protected-access
      return ResourceSummaryWriter(
          shared_name=shared_name,
          init_op_fn=functools.partial(
              gen_summary_ops.create_summary_file_writer,
              logdir=logdir,
              max_queue=max_queue,
              flush_millis=flush_millis,
              filename_suffix=filename_suffix),
          name=name,
          v2=True)
393
394
def create_file_writer(logdir,
                       max_queue=None,
                       flush_millis=None,
                       filename_suffix=None,
                       name=None):
  """Creates a summary file writer in the current context under the given name.

  Args:
    logdir: a string, or None. If a string, creates a summary file writer
     which writes to the directory named by the string. If None, returns
     a mock object which acts like a summary writer but does nothing,
     useful to use as a context manager.
    max_queue: the largest number of summaries to keep in a queue; will
     flush once the queue gets bigger than this. Defaults to 10.
    flush_millis: the largest interval between flushes. Defaults to 120,000.
    filename_suffix: optional suffix for the event file name. Defaults to `.v2`.
    name: Shared name for this SummaryWriter resource stored to default
      Graph. Defaults to the provided logdir prefixed with `logdir:`. Note: if a
      summary writer resource with this shared name already exists, the returned
      SummaryWriter wraps that resource and the other arguments have no effect.

  Returns:
    Either a summary writer or an empty object which can be used as a
    summary writer.
  """
  if logdir is None:
    return NoopSummaryWriter()
  logdir = str(logdir)
  with ops.device("cpu:0"):
    # Fill in unspecified arguments with their default constants.
    max_queue = constant_op.constant(10) if max_queue is None else max_queue
    if flush_millis is None:
      flush_millis = constant_op.constant(2 * 60 * 1000)
    if filename_suffix is None:
      filename_suffix = constant_op.constant(".v2")
    if name is None:
      name = "logdir:" + logdir
    writer_init = functools.partial(
        gen_summary_ops.create_summary_file_writer,
        logdir=logdir,
        max_queue=max_queue,
        flush_millis=flush_millis,
        filename_suffix=filename_suffix)
    return ResourceSummaryWriter(shared_name=name, init_op_fn=writer_init)
440
441
def create_db_writer(db_uri,
                     experiment_name=None,
                     run_name=None,
                     user_name=None,
                     name=None):
  """Creates a summary database writer in the current context.

  This can be used to write tensors from the execution graph directly
  to a database. Only SQLite is supported right now. This function
  will create the schema if it doesn't exist. Entries in the Users,
  Experiments, and Runs tables will be created automatically if they
  don't already exist.

  Args:
    db_uri: For example "file:/tmp/foo.sqlite".
    experiment_name: Defaults to YYYY-MM-DD in local time if None.
      Empty string means the Run will not be associated with an
      Experiment. Can't contain ASCII control characters or <>. Case
      sensitive.
    run_name: Defaults to HH:MM:SS in local time if None. Empty string
      means a Tag will not be associated with any Run. Can't contain
      ASCII control characters or <>. Case sensitive.
    user_name: Defaults to system username if None. Empty means the
      Experiment will not be associated with a User. Must be valid as
      both a DNS label and Linux username.
    name: Shared name for this SummaryWriter resource stored to default
      `tf.Graph`.

  Returns:
    A `tf.summary.SummaryWriter` instance.
  """
  with ops.device("cpu:0"):
    # strftime with no time argument formats the current local time, which is
    # what the explicit time.localtime(time.time()) call would produce.
    if experiment_name is None:
      experiment_name = time.strftime("%Y-%m-%d")
    if run_name is None:
      run_name = time.strftime("%H:%M:%S")
    if user_name is None:
      user_name = getpass.getuser()
    # Validate each name against its pattern and convert to a string tensor.
    experiment_name = _cleanse_string(
        "experiment_name", _EXPERIMENT_NAME_PATTERNS, experiment_name)
    run_name = _cleanse_string("run_name", _RUN_NAME_PATTERNS, run_name)
    user_name = _cleanse_string("user_name", _USER_NAME_PATTERNS, user_name)
    db_init = functools.partial(
        gen_summary_ops.create_summary_db_writer,
        db_uri=db_uri,
        experiment_name=experiment_name,
        run_name=run_name,
        user_name=user_name)
    return ResourceSummaryWriter(shared_name=name, init_op_fn=db_init)
492
493
@tf_export("summary.create_noop_writer", v1=[])
def create_noop_writer():
  """Returns a summary writer that does nothing.

  This is useful as a placeholder in code that expects a context manager.
  """
  return NoopSummaryWriter()
501
502
def _cleanse_string(name, pattern, value):
  """Validates `value` against `pattern` and returns it as a string Tensor.

  Non-string values (e.g. Tensors) bypass the pattern check and are only
  converted.

  Raises:
    ValueError: if `value` is a string that does not match `pattern`.
  """
  if isinstance(value, six.string_types) and not pattern.search(value):
    raise ValueError("%s (%s) must match %s" % (name, value, pattern.pattern))
  return ops.convert_to_tensor(value, dtypes.string)
507
508
def _nothing():
  """Convenient else branch for when summaries do not record.

  Returns a False constant, matching the True returned by the record branch.
  """
  return constant_op.constant(False)
512
513
def all_summary_ops():
  """Graph-mode only. Returns all summary ops.

  Please note this excludes `tf.summary.graph` ops.

  Returns:
    The summary ops, or None when executing eagerly.
  """
  if not context.executing_eagerly():
    return ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION)  # pylint: disable=protected-access
  return None
525
526
def summary_writer_initializer_op():
  """Graph-mode only. Returns the list of ops to create all summary writers.

  Returns:
    The initializer ops.

  Raises:
    RuntimeError: If in Eager mode.
  """
  if context.executing_eagerly():
    raise RuntimeError(
        "tf.contrib.summary.summary_writer_initializer_op is only "
        "supported in graph mode.")
  # Init ops are registered per graph key by ResourceSummaryWriter.__init__;
  # setdefault mutates the module-level dict in place, so no `global` needed.
  graph_key = ops.get_default_graph()._graph_key  # pylint: disable=protected-access
  return _SUMMARY_WRITER_INIT_OP.setdefault(graph_key, [])
543
544
# Characters that are not legal in TF name scopes; summary_scope() strips them
# from user-provided names before opening the scope.
_INVALID_SCOPE_CHARACTERS = re.compile(r"[^-_/.A-Za-z0-9]")
546
547
@tf_export("summary.summary_scope", v1=[])
@tf_contextlib.contextmanager
def summary_scope(name, default_name="summary", values=None):
  """A context manager for use when defining a custom summary op.

  This behaves similarly to `tf.name_scope`, except that it returns a generated
  summary tag in addition to the scope name. The tag is structurally similar to
  the scope name - derived from the user-provided name, prefixed with enclosing
  name scopes if any - but we relax the constraint that it be uniquified, as
  well as the character set limitation (so the user-provided name can contain
  characters not legal for scope names; in the scope name these are removed).

  This makes the summary tag more predictable and consistent for the user.

  For example, to define a new summary op called `my_op`:

  ```python
  def my_op(name, my_value, step):
    with tf.summary.summary_scope(name, "MyOp", [my_value]) as (tag, scope):
      my_value = tf.convert_to_tensor(my_value)
      return tf.summary.write(tag, my_value, step=step)
  ```

  Args:
    name: string name for the summary.
    default_name: Optional; if provided, used as default name of the summary.
    values: Optional; passed as `values` parameter to name_scope.

  Yields:
    A tuple `(tag, scope)` as described above.
  """
  requested = name or default_name
  enclosing = ops.get_name_scope()
  if enclosing:
    tag = enclosing + "/" + requested
  else:
    tag = requested
  # Scope names allow only a restricted charset; strip everything illegal and
  # fall back to the default name if nothing survives.
  scope_name = _INVALID_SCOPE_CHARACTERS.sub("", requested) or None
  with ops.name_scope(scope_name, default_name, values) as scope:
    yield tag, scope
587
588
@tf_export("summary.write", v1=[])
def write(tag, tensor, step=None, metadata=None, name=None):
  """Writes a generic summary to the default SummaryWriter if one exists.

  This exists primarily to support the definition of type-specific summary ops
  like scalar() and image(), and is not intended for direct use unless defining
  a new type-specific summary op.

  Args:
    tag: string tag used to identify the summary (e.g. in TensorBoard), usually
      generated with `tf.summary.summary_scope`
    tensor: the Tensor holding the summary data to write
    step: Explicit `int64`-castable monotonic step value for this summary. If
      omitted, this defaults to `tf.summary.experimental.get_step()`, which must
      not be None.
    metadata: Optional SummaryMetadata, as a proto or serialized bytes
    name: Optional string name for this op.

  Returns:
    True on success, or false if no summary was written because no default
    summary writer was available.

  Raises:
    ValueError: if a default writer exists, but no step was provided and
      `tf.summary.experimental.get_step()` is None.
  """
  with ops.name_scope(name, "write_summary") as scope:
    if context.context().summary_writer is None:
      # No default writer: report failure without raising.
      return constant_op.constant(False)
    if step is None:
      step = get_step()
      if step is None:
        raise ValueError("No step set via 'step' argument or "
                         "tf.summary.experimental.set_step()")
    # Normalize metadata to serialized bytes (proto, bytes, or tensor).
    if metadata is None:
      serialized_metadata = b""
    elif hasattr(metadata, "SerializeToString"):
      serialized_metadata = metadata.SerializeToString()
    else:
      serialized_metadata = metadata

    def record():
      """Record the actual summary and return True."""
      # Note the identity to move the tensor to the CPU.
      with ops.device("cpu:0"):
        write_summary_op = gen_summary_ops.write_summary(
            context.context().summary_writer._resource,  # pylint: disable=protected-access
            step,
            array_ops.identity(tensor),
            tag,
            serialized_metadata,
            name=scope)
        with ops.control_dependencies([write_summary_op]):
          return constant_op.constant(True)

    # Write only when the (possibly dynamic) recording condition holds;
    # smart_cond folds the branch statically for python-bool conditions.
    return smart_cond.smart_cond(
        _should_record_summaries_v2(), record, _nothing, name="summary_cond")
646
647
def summary_writer_function(name, tensor, function, family=None):
  """Helper function to write summaries.

  Args:
    name: name of the summary
    tensor: main tensor to form the summary
    function: function taking a tag and a scope which writes the summary
    family: optional, the summary's family

  Returns:
    The result of writing the summary, or a no-op when there is no default
    summary writer.
  """
  name_scope = ops.get_name_scope()
  if name_scope:
    # Add a slash to allow reentering the name scope.
    name_scope += "/"
  def record():
    # Runs `function` inside the re-entered name scope plus a summary scope,
    # then returns True with a control dependency on the write op.
    with ops.name_scope(name_scope), summary_op_util.summary_scope(
        name, family, values=[tensor]) as (tag, scope):
      with ops.control_dependencies([function(tag, scope)]):
        return constant_op.constant(True)

  if context.context().summary_writer is None:
    return control_flow_ops.no_op()
  with ops.device("cpu:0"):
    # Write only when recording is enabled; smart_cond folds python bools.
    op = smart_cond.smart_cond(
        should_record_summaries(), record, _nothing, name="")
    if not context.executing_eagerly():
      # Track the op in the graph collection consumed by all_summary_ops().
      ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op)  # pylint: disable=protected-access
  return op
678
679
def generic(name, tensor, metadata=None, family=None, step=None):
  """Writes a tensor summary if possible."""

  def write_fn(tag, scope):
    """Builds the write_summary op for the given tag inside `scope`."""
    # Normalize metadata to a string tensor of serialized bytes.
    if metadata is None:
      serialized = constant_op.constant("")
    elif hasattr(metadata, "SerializeToString"):
      serialized = constant_op.constant(metadata.SerializeToString())
    else:
      serialized = metadata
    # Note the identity to move the tensor to the CPU.
    return gen_summary_ops.write_summary(
        context.context().summary_writer._resource,  # pylint: disable=protected-access
        _choose_step(step),
        array_ops.identity(tensor),
        tag,
        serialized,
        name=scope)

  return summary_writer_function(name, tensor, write_fn, family=family)
699
700
def scalar(name, tensor, family=None, step=None):
  """Writes a scalar summary if possible.

  Unlike `tf.contrib.summary.generic` this op may change the dtype
  depending on the writer, for both practical and efficiency concerns.

  Args:
    name: An arbitrary name for this summary.
    tensor: A `tf.Tensor` Must be one of the following types:
      `float32`, `float64`, `int32`, `int64`, `uint8`, `int16`,
      `int8`, `uint16`, `half`, `uint32`, `uint64`.
    family: Optional, the summary's family.
    step: The `int64` monotonic step variable, which defaults
      to `tf.train.get_global_step`.

  Returns:
    The created `tf.Operation` or a `tf.no_op` if summary writing has
    not been enabled for this context.
  """

  def write_fn(tag, scope):
    """Builds the write_scalar_summary op for the given tag."""
    # Note the identity to move the tensor to the CPU.
    return gen_summary_ops.write_scalar_summary(
        context.context().summary_writer._resource,  # pylint: disable=protected-access
        _choose_step(step),
        tag,
        array_ops.identity(tensor),
        name=scope)

  return summary_writer_function(name, tensor, write_fn, family=family)
731
732
def histogram(name, tensor, family=None, step=None):
  """Writes a histogram summary if possible."""

  def write_fn(tag, scope):
    """Builds the write_histogram_summary op for the given tag."""
    # Note the identity to move the tensor to the CPU.
    return gen_summary_ops.write_histogram_summary(
        context.context().summary_writer._resource,  # pylint: disable=protected-access
        _choose_step(step),
        tag,
        array_ops.identity(tensor),
        name=scope)

  return summary_writer_function(name, tensor, write_fn, family=family)
746
747
def image(name, tensor, bad_color=None, max_images=3, family=None, step=None):
  """Writes an image summary if possible."""

  def write_fn(tag, scope):
    """Builds the write_image_summary op for the given tag."""
    # Default bad-pixel marker is opaque red (RGBA).
    if bad_color is None:
      bad_pixel = constant_op.constant([255, 0, 0, 255], dtype=dtypes.uint8)
    else:
      bad_pixel = bad_color
    # Note the identity to move the tensor to the CPU.
    return gen_summary_ops.write_image_summary(
        context.context().summary_writer._resource,  # pylint: disable=protected-access
        _choose_step(step),
        tag,
        array_ops.identity(tensor),
        bad_pixel,
        max_images,
        name=scope)

  return summary_writer_function(name, tensor, write_fn, family=family)
765
766
def audio(name, tensor, sample_rate, max_outputs, family=None, step=None):
  """Writes an audio summary if possible."""

  def write_fn(tag, scope):
    """Builds the write_audio_summary op for the given tag."""
    # Note the identity to move the tensor to the CPU.
    return gen_summary_ops.write_audio_summary(
        context.context().summary_writer._resource,  # pylint: disable=protected-access
        _choose_step(step),
        tag,
        array_ops.identity(tensor),
        sample_rate=sample_rate,
        max_outputs=max_outputs,
        name=scope)

  return summary_writer_function(name, tensor, write_fn, family=family)
782
783
def graph(param, step=None, name=None):
  """Writes a TensorFlow graph to the summary interface.

  The graph summary is, strictly speaking, not a summary. Conditions
  like `tf.summary.should_record_summaries` do not apply. Only
  a single graph can be associated with a particular run. If multiple
  graphs are written, then only the last one will be considered by
  TensorBoard.

  When not using eager execution mode, the user should consider passing
  the `graph` parameter to `tf.contrib.summary.initialize` instead of
  calling this function. Otherwise special care needs to be taken when
  using the graph to record the graph.

  Args:
    param: A `tf.Tensor` containing a serialized graph proto. When
      eager execution is enabled, this function will automatically
      coerce `tf.Graph`, `tf.GraphDef`, and string types.
    step: The global step variable. This doesn't have useful semantics
      for graph summaries, but is used anyway, due to the structure of
      event log files. This defaults to the global step.
    name: A name for the operation (optional).

  Returns:
    The created `tf.Operation` or a `tf.no_op` if summary writing has
    not been enabled for this context.

  Raises:
    TypeError: If `param` isn't already a `tf.Tensor` in graph mode.
  """
  if not context.executing_eagerly() and not isinstance(param, ops.Tensor):
    raise TypeError("graph() needs a tf.Tensor (e.g. tf.placeholder) in graph "
                    "mode, but was: %s" % type(param))
  writer = context.context().summary_writer
  if writer is None:
    # No default writer installed: silently do nothing.
    return control_flow_ops.no_op()
  with ops.device("cpu:0"):
    # In eager mode, coerce Graph/GraphDef objects to a serialized proto.
    if isinstance(param, (ops.Graph, graph_pb2.GraphDef)):
      tensor = ops.convert_to_tensor(_serialize_graph(param), dtypes.string)
    else:
      tensor = array_ops.identity(param)
    return gen_summary_ops.write_graph_summary(
        writer._resource, _choose_step(step), tensor, name=name)  # pylint: disable=protected-access
827
828
829_graph = graph  # for functions with a graph parameter
830
831
@tf_export("summary.import_event", v1=[])
def import_event(tensor, name=None):
  """Writes a `tf.Event` binary proto.

  This can be used to import existing event logs into a new summary writer sink.
  Please note that this is lower level than the other summary functions and
  will ignore the `tf.summary.should_record_summaries` setting.

  Args:
    tensor: A `tf.Tensor` of type `string` containing a serialized
      `tf.Event` proto.
    name: A name for the operation (optional).

  Returns:
    The created `tf.Operation`.
  """
  writer_resource = context.context().summary_writer._resource  # pylint: disable=protected-access
  return gen_summary_ops.import_event(writer_resource, tensor, name=name)
850
851
@tf_export("summary.flush", v1=[])
def flush(writer=None, name=None):
  """Forces summary writer to send any buffered data to storage.

  This operation blocks until that finishes.

  Args:
    writer: The `tf.summary.SummaryWriter` resource to flush. When None, the
      thread default will be used; if no thread default exists either, a
      `tf.no_op` is returned.
    name: A name for the operation (optional).

  Returns:
    The created `tf.Operation`.
  """
  if writer is None:
    writer = context.context().summary_writer
    if writer is None:
      return control_flow_ops.no_op()
  # Accept either a ResourceSummaryWriter or a raw resource tensor.
  if isinstance(writer, ResourceSummaryWriter):
    resource = writer._resource  # pylint: disable=protected-access
  else:
    resource = writer
  with ops.device("cpu:0"):
    return gen_summary_ops.flush_summary_writer(resource, name=name)
878
879
880_flush_fn = flush  # for within SummaryWriter.flush()
881
882
def eval_dir(model_dir, name=None):
  """Construct a logdir for an eval summary writer.

  Args:
    model_dir: Base model directory.
    name: Optional eval run name; when truthy, the subdirectory is
      "eval_<name>", otherwise just "eval".

  Returns:
    The joined path under `model_dir`.
  """
  if name:
    subdir = "eval_" + name
  else:
    subdir = "eval"
  return os.path.join(model_dir, subdir)
886
887
@deprecation.deprecated(date=None,
                        instructions="Renamed to create_file_writer().")
def create_summary_file_writer(*args, **kwargs):
  """Please use `tf.contrib.summary.create_file_writer`.

  Deprecated alias; logs a warning and forwards all arguments unchanged
  to `create_file_writer()`.
  """
  logging.warning("Deprecation Warning: create_summary_file_writer was renamed "
                  "to create_file_writer")
  return create_file_writer(*args, **kwargs)
895
896
def _serialize_graph(arbitrary_graph):
  """Serializes a `tf.Graph` or a graph proto to bytes."""
  if isinstance(arbitrary_graph, ops.Graph):
    # Include inferred shapes so TensorBoard can display them.
    return arbitrary_graph.as_graph_def(add_shapes=True).SerializeToString()
  return arbitrary_graph.SerializeToString()
902
903
def _choose_step(step):
  """Returns `step` as a tensor, defaulting to the global step when None."""
  if step is None:
    return training_util.get_or_create_global_step()
  if isinstance(step, ops.Tensor):
    return step
  # Non-tensor values (e.g. Python ints) are coerced to int64 tensors.
  return ops.convert_to_tensor(step, dtypes.int64)
910
911
def _check_create_file_writer_args(inside_function, **kwargs):
  """Helper to check the validity of arguments to a create_file_writer() call.

  Args:
    inside_function: whether the create_file_writer() call is in a tf.function
    **kwargs: the arguments to check, as kwargs to give them names.

  Raises:
    ValueError: if the arguments are graph tensors.
  """
  for arg_name, arg in kwargs.items():
    # Only symbolic (non-eager) tensors are invalid here.
    is_graph_tensor = (
        tensor_util.is_tensor(arg) and not isinstance(arg, ops.EagerTensor))
    if not is_graph_tensor:
      continue
    if inside_function:
      raise ValueError(
          "Invalid graph Tensor argument \"%s=%s\" to create_file_writer() "
          "inside an @tf.function. The create call will be lifted into the "
          "outer eager execution context, so it cannot consume graph tensors "
          "defined inside the function body." % (arg_name, arg))
    raise ValueError(
        "Invalid graph Tensor argument \"%s=%s\" to eagerly executed "
        "create_file_writer()." % (arg_name, arg))
934
935
def run_metadata(name, data, step=None):
  """Writes entire RunMetadata summary.

  A RunMetadata can contain DeviceStats, partition graphs, and function graphs.
  Please refer to the proto for definition of each field.

  Args:
    name: A name for this summary. The summary tag used for TensorBoard will be
      this name prefixed by any active name scopes.
    data: A RunMetadata proto to write.
    step: Explicit `int64`-castable monotonic step value for this summary. If
      omitted, this defaults to `tf.summary.experimental.get_step()`, which must
      not be None.

  Returns:
    True on success, or false if no summary was written because no default
    summary writer was available.

  Raises:
    ValueError: if a default writer exists, but no step was provided and
      `tf.summary.experimental.get_step()` is None.
  """
  metadata = summary_pb2.SummaryMetadata()
  # The plugin name is hard coded; see go/tb-plugin-name-hardcode for the
  # rationale.
  metadata.plugin_data.plugin_name = "graph_run_metadata"
  metadata.plugin_data.content = b"1"  # version number = 1

  with summary_scope(
      name, "graph_run_metadata_summary", [data, step]) as (tag, _):
    serialized = constant_op.constant(
        data.SerializeToString(), dtype=dtypes.string)
    return write(tag=tag, tensor=serialized, step=step, metadata=metadata)
974
975
def run_metadata_graphs(name, data, step=None):
  """Writes graphs from a RunMetadata summary.

  Args:
    name: A name for this summary. The summary tag used for TensorBoard will be
      this name prefixed by any active name scopes.
    data: A RunMetadata proto to write.
    step: Explicit `int64`-castable monotonic step value for this summary. If
      omitted, this defaults to `tf.summary.experimental.get_step()`, which must
      not be None.

  Returns:
    True on success, or false if no summary was written because no default
    summary writer was available.

  Raises:
    ValueError: if a default writer exists, but no step was provided and
      `tf.summary.experimental.get_step()` is None.
  """
  metadata = summary_pb2.SummaryMetadata()
  # The plugin name is hard coded; see go/tb-plugin-name-hardcode for the
  # rationale.
  metadata.plugin_data.plugin_name = "graph_run_metadata_graph"
  metadata.plugin_data.content = b"1"  # version number = 1

  # Keep only the graph-related fields; everything else in the RunMetadata is
  # dropped before serializing.
  data = config_pb2.RunMetadata(
      function_graphs=data.function_graphs,
      partition_graphs=data.partition_graphs)

  with summary_scope(
      name, "graph_run_metadata_graph_summary", [data, step]) as (tag, _):
    serialized = constant_op.constant(
        data.SerializeToString(), dtype=dtypes.string)
    return write(tag=tag, tensor=serialized, step=step, metadata=metadata)
1015
1016
def keras_model(name, data, step=None):
  """Writes a Keras model as JSON to a Summary.

  Writing the Keras model configuration allows the TensorBoard graph plugin to
  render a conceptual graph, as opposed to a graph of ops. If the model fails
  to serialize as JSON, the failure is logged and ignored, and False is
  returned.

  Args:
    name: A name for this summary. The summary tag used for TensorBoard will be
      this name prefixed by any active name scopes.
    data: A Keras Model to write.
    step: Explicit `int64`-castable monotonic step value for this summary. If
      omitted, this defaults to `tf.summary.experimental.get_step()`, which must
      not be None.

  Returns:
    True on success, or False if no summary was written because no default
    summary writer was available.

  Raises:
    ValueError: if a default writer exists, but no step was provided and
      `tf.summary.experimental.get_step()` is None.
  """
  summary_metadata = summary_pb2.SummaryMetadata()
  # Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for
  # the rationale.
  summary_metadata.plugin_data.plugin_name = "graph_keras_model"
  # version number = 1
  summary_metadata.plugin_data.content = b"1"

  try:
    json_string = data.to_json()
  except Exception as exc:  # pylint: disable=broad-except
    # An exception should not break a model code. Use lazy %-style args so the
    # message is only formatted when the warning is actually emitted.
    logging.warn("Model failed to serialize as JSON. Ignoring... %s", exc)
    return False

  with summary_scope(name, "graph_keras_model", [data, step]) as (tag, _):
    return write(
        tag=tag,
        tensor=constant_op.constant(json_string, dtype=dtypes.string),
        step=step,
        metadata=summary_metadata)
1060
1061
# Records which collectors (graph collection, profiler) the active trace uses.
_TraceContext = collections.namedtuple("TraceContext", ("graph", "profiler"))
# Guards reads/writes of _current_trace_context across threads.
_current_trace_context_lock = threading.Lock()
# The active trace context, or None when no trace is running.
_current_trace_context = None
1065
1066
@tf_export("summary.trace_on", v1=[])
def trace_on(graph=True, profiler=False):  # pylint: disable=redefined-outer-name
  """Starts a trace to record computation graphs and profiling information.

  Must be invoked in eager mode.

  When enabled, the TensorFlow runtime will collect information that can later
  be exported and consumed by TensorBoard. The trace is activated across the
  entire TensorFlow runtime and affects all threads of execution.

  To stop the trace and export the collected information, use
  `tf.summary.trace_export`. To stop the trace without exporting, use
  `tf.summary.trace_off`.

  Args:
    graph: If True, enables collection of executed graphs. It includes ones from
        tf.function invocation and ones from the legacy graph mode. The default
        is True.
    profiler: If True, enables the advanced profiler. Enabling profiler
        implicitly enables the graph collection. The profiler may incur a high
        memory overhead. The default is False.

  """
  # Tracing only makes sense in eager mode, outside of any tf.function.
  if ops.inside_function():
    logging.warn("Cannot enable trace inside a tf.function.")
    return
  if not context.context().executing_eagerly():
    logging.warn("Must enable trace in eager mode.")
    return

  global _current_trace_context
  with _current_trace_context_lock:
    if _current_trace_context:
      logging.warn("Trace already enabled")
      return

    if profiler:
      # Run metadata (which the profiler relies on) also captures graphs, so
      # plain graph collection is unnecessary in this branch.
      context.context().enable_run_metadata()
      _profiler.start()
    elif graph:
      context.context().enable_graph_collection()

    _current_trace_context = _TraceContext(graph=graph, profiler=profiler)
1110
1111
@tf_export("summary.trace_export", v1=[])
def trace_export(name, step=None, profiler_outdir=None):
  """Stops and exports the active trace as a Summary and/or profile file.

  Stops the trace and exports all metadata collected during the trace to the
  default SummaryWriter, if one has been set. The trace is always stopped
  afterwards via `trace_off()`.

  Args:
    name: A name for the summary to be written.
    step: Explicit `int64`-castable monotonic step value for this summary. If
      omitted, this defaults to `tf.summary.experimental.get_step()`, which must
      not be None.
    profiler_outdir: Output directory for profiler. It is required when profiler
      is enabled when trace was started. Otherwise, it is ignored.

  Raises:
    ValueError: if no trace is active, if the profiler is enabled but
      `profiler_outdir` was not given, or if a default writer exists but no
      step was provided and `tf.summary.experimental.get_step()` is None.
  """
  # TODO(stephanlee): See if we can remove profiler_outdir and infer it from
  # the SummaryWriter's logdir.
  global _current_trace_context

  # Mirror the guard clauses in trace_on(): exporting only makes sense in
  # eager mode, outside of any tf.function.
  if ops.inside_function():
    logging.warn("Cannot export trace inside a tf.function.")
    return
  if not context.context().executing_eagerly():
    logging.warn("Can only export trace while executing eagerly.")
    return

  with _current_trace_context_lock:
    if _current_trace_context is None:
      raise ValueError("Must enable trace before export.")
    # Unpack the _TraceContext namedtuple set by trace_on().
    graph, profiler = _current_trace_context  # pylint: disable=redefined-outer-name
    if profiler and profiler_outdir is None:
      raise ValueError("Required profiler_outdir is not specified")

  run_meta = context.context().export_run_metadata()

  # With graph-only collection, write just the graphs; otherwise write the
  # full RunMetadata (which also contains profiling data).
  if graph and not profiler:
    run_metadata_graphs(name, run_meta, step)
  else:
    run_metadata(name, run_meta, step)

  if profiler:
    _profiler.save(profiler_outdir, _profiler.stop())

  # Exporting terminates the trace and discards any remaining state.
  trace_off()
1160
1161
@tf_export("summary.trace_off", v1=[])
def trace_off():
  """Stops the current trace and discards any collected information."""
  global _current_trace_context
  with _current_trace_context_lock:
    _current_trace_context = None

  # Disabling run_metadata disables graph collection as well.
  context.context().disable_run_metadata()

  # profiler only has start and stop. One needs to stop in order to export
  # and stopping when it is not running will raise an error. The trace may
  # have been started without the profiler, so that error is swallowed here.
  try:
    _profiler.stop()
  except _profiler.ProfilerNotRunningError:
    pass
1178