# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""For seeding individual ops based on a graph-level seed."""

import weakref

from tensorflow.python.eager import context
from tensorflow.python.framework import config
from tensorflow.python.framework import ops
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export


DEFAULT_GRAPH_SEED = 87654321
_MAXINT32 = 2**31 - 1

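# Maps each graph to a counter used to derive op-level seeds for unseeded ops;
# weak keys let graphs be garbage collected as usual.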
_graph_to_seed_dict = weakref.WeakKeyDictionary()


def _truncate_seed(seed):
  return seed % _MAXINT32  # Truncate to fit into 32-bit integer


@tf_export(v1=['random.get_seed', 'get_seed'])
@deprecation.deprecated_endpoints('get_seed')
def get_seed(op_seed):
  """Returns the local seeds an operation should use given an op-specific seed.

  Given an operation-specific seed, `op_seed`, this helper function returns two
  seeds derived from the graph-level and op-level seeds. Many random operations
  internally use the two seeds to allow the user to change the seed globally
  for a graph, or for only specific operations.

  For details on how the graph-level seed interacts with op seeds, see
  `tf.compat.v1.random.set_random_seed`.

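  For example, a minimal sketch (the graph-level seed here is assumed to be
  1234, set as shown below):

  ```python
  tf.compat.v1.random.set_random_seed(1234)
  graph_seed, op_seed = tf.compat.v1.random.get_seed(42)
  # graph_seed == 1234 and op_seed == 42, both truncated to fit in 32 bits.
  ```
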
  Args:
    op_seed: integer.

  Returns:
    A tuple of two integers that should be used for the local seed of this
    operation.
  """
  eager = context.executing_eagerly()

  if eager:
    global_seed = context.global_seed()
  else:
    global_seed = ops.get_default_graph().seed

  if global_seed is not None:
    if op_seed is None:
      # pylint: disable=protected-access
      if hasattr(ops.get_default_graph(), '_seed_used'):
        ops.get_default_graph()._seed_used = True
      if eager:
        op_seed = context.internal_operation_seed()
      else:
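        # Each graph gets its own counter so that unseeded ops in the same
        # graph still receive distinct op-level seeds.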
        op_seed = _graph_to_seed_dict.setdefault(ops.get_default_graph(), 0)
        _graph_to_seed_dict[ops.get_default_graph()] += 1

    seeds = _truncate_seed(global_seed), _truncate_seed(op_seed)
  else:
    if op_seed is not None:
      seeds = DEFAULT_GRAPH_SEED, _truncate_seed(op_seed)
    else:
      seeds = None, None

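  # When op determinism is enabled (for example via
  # `tf.config.experimental.enable_op_determinism`), running an unseeded random
  # op is an error rather than silently nondeterministic.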
  if seeds == (None, None) and config.is_op_determinism_enabled():
    raise RuntimeError(  # pylint: disable=g-doc-exception
        'Random ops require a seed to be set when determinism is enabled. '
        'Please set a seed before running the op, e.g. by calling '
        'tf.random.set_seed(1).')

  # Avoid (0, 0) as the C++ ops interpret it as nondeterminism, which would
  # be unexpected since Python docs say nondeterminism is (None, None).
  if seeds == (0, 0):
    return (0, _MAXINT32)
  return seeds


@tf_export(v1=['random.set_random_seed', 'set_random_seed'])
def set_random_seed(seed):
  """Sets the graph-level random seed for the default graph.

  Operations that rely on a random seed actually derive it from two seeds:
  the graph-level and operation-level seeds. This sets the graph-level seed.

  Its interactions with operation-level seeds are as follows:

    1. If neither the graph-level nor the operation seed is set:
      A random seed is used for this op.
    2. If the graph-level seed is set, but the operation seed is not:
      The system deterministically picks an operation seed in conjunction with
      the graph-level seed so that it gets a unique random sequence. Within the
      same version of TensorFlow and user code, this sequence is deterministic.
      However across different versions, this sequence might change. If the
      code depends on particular seeds to work, specify both graph-level
      and operation-level seeds explicitly.
    3. If the graph-level seed is not set, but the operation seed is set:
      A default graph-level seed and the specified operation seed are used to
      determine the random sequence.
    4. If both the graph-level and the operation seed are set:
      Both seeds are used in conjunction to determine the random sequence.

  To illustrate the user-visible effects, consider these examples:

  To generate different sequences across sessions, set neither
  graph-level nor op-level seeds:

  ```python
  a = tf.random.uniform([1])
  b = tf.random.normal([1])

  print("Session 1")
  with tf.compat.v1.Session() as sess1:
    print(sess1.run(a))  # generates 'A1'
    print(sess1.run(a))  # generates 'A2'
    print(sess1.run(b))  # generates 'B1'
    print(sess1.run(b))  # generates 'B2'

  print("Session 2")
  with tf.compat.v1.Session() as sess2:
    print(sess2.run(a))  # generates 'A3'
    print(sess2.run(a))  # generates 'A4'
    print(sess2.run(b))  # generates 'B3'
    print(sess2.run(b))  # generates 'B4'
  ```

  To generate the same repeatable sequence for an op across sessions, set the
  seed for the op:

  ```python
  a = tf.random.uniform([1], seed=1)
  b = tf.random.normal([1])

  # Repeatedly running this block with the same graph will generate the same
  # sequence of values for 'a', but different sequences of values for 'b'.
  print("Session 1")
  with tf.compat.v1.Session() as sess1:
    print(sess1.run(a))  # generates 'A1'
    print(sess1.run(a))  # generates 'A2'
    print(sess1.run(b))  # generates 'B1'
    print(sess1.run(b))  # generates 'B2'

  print("Session 2")
  with tf.compat.v1.Session() as sess2:
    print(sess2.run(a))  # generates 'A1'
    print(sess2.run(a))  # generates 'A2'
    print(sess2.run(b))  # generates 'B3'
    print(sess2.run(b))  # generates 'B4'
  ```

  To make the random sequences generated by all ops be repeatable across
  sessions, set a graph-level seed:

  ```python
  tf.compat.v1.random.set_random_seed(1234)
  a = tf.random.uniform([1])
  b = tf.random.normal([1])

  # Repeatedly running this block with the same graph will generate the same
  # sequences of 'a' and 'b'.
  print("Session 1")
  with tf.compat.v1.Session() as sess1:
    print(sess1.run(a))  # generates 'A1'
    print(sess1.run(a))  # generates 'A2'
    print(sess1.run(b))  # generates 'B1'
    print(sess1.run(b))  # generates 'B2'

  print("Session 2")
  with tf.compat.v1.Session() as sess2:
    print(sess2.run(a))  # generates 'A1'
    print(sess2.run(a))  # generates 'A2'
    print(sess2.run(b))  # generates 'B1'
    print(sess2.run(b))  # generates 'B2'
  ```

  @compatibility(TF2)
  `tf.compat.v1.set_random_seed` is compatible with eager mode. However, in
  eager mode this API will set the global seed instead of the graph-level seed
  of the default graph. In TF2 this API was replaced by
  [tf.random.set_seed]
  (https://www.tensorflow.org/api_docs/python/tf/random/set_seed).
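
  A minimal migration sketch:

  ```python
  # TF1.x API (also callable from TF2 via tf.compat.v1):
  tf.compat.v1.random.set_random_seed(1234)

  # TF2 equivalent:
  tf.random.set_seed(1234)
  ```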
  @end_compatibility

  Args:
    seed: integer.
  """
  if context.executing_eagerly():
    context.set_global_seed(seed)
  else:
    ops.get_default_graph().seed = seed


@tf_export('random.set_seed', v1=[])
def set_seed(seed):
  """Sets the global random seed.

  Operations that rely on a random seed actually derive it from two seeds:
  the global and operation-level seeds. This sets the global seed.

  Its interactions with operation-level seeds are as follows:

    1. If neither the global seed nor the operation seed is set: A randomly
      picked seed is used for this op.
    2. If the global seed is set, but the operation seed is not:
      The system deterministically picks an operation seed in conjunction with
      the global seed so that it gets a unique random sequence. Within the
      same version of TensorFlow and user code, this sequence is deterministic.
      However across different versions, this sequence might change. If the
      code depends on particular seeds to work, specify both global
      and operation-level seeds explicitly.
    3. If the operation seed is set, but the global seed is not set:
      A default global seed and the specified operation seed are used to
      determine the random sequence.
    4. If both the global and the operation seed are set:
      Both seeds are used in conjunction to determine the random sequence.

  To illustrate the user-visible effects, consider these examples:

  If neither the global seed nor the operation seed is set, we get different
  results for every call to the random op and every re-run of the program:

  ```python
  print(tf.random.uniform([1]))  # generates 'A1'
  print(tf.random.uniform([1]))  # generates 'A2'
  ```

  (now close the program and run it again)

  ```python
  print(tf.random.uniform([1]))  # generates 'A3'
  print(tf.random.uniform([1]))  # generates 'A4'
  ```

  If the global seed is set but the operation seed is not set, we get different
  results for every call to the random op, but the same sequence for every
  re-run of the program:

  ```python
  tf.random.set_seed(1234)
  print(tf.random.uniform([1]))  # generates 'A1'
  print(tf.random.uniform([1]))  # generates 'A2'
  ```

  (now close the program and run it again)

  ```python
  tf.random.set_seed(1234)
  print(tf.random.uniform([1]))  # generates 'A1'
  print(tf.random.uniform([1]))  # generates 'A2'
  ```

  The reason we get 'A2' instead of 'A1' on the second call of
  `tf.random.uniform` above is because the second call uses a different
  operation seed.

  Note that `tf.function` acts like a re-run of a program in this case. When
  the global seed is set but operation seeds are not set, the sequence of
  random numbers is the same for each `tf.function`. For example:

  ```python
  tf.random.set_seed(1234)

  @tf.function
  def f():
    a = tf.random.uniform([1])
    b = tf.random.uniform([1])
    return a, b

  @tf.function
  def g():
    a = tf.random.uniform([1])
    b = tf.random.uniform([1])
    return a, b

  print(f())  # prints '(A1, A2)'
  print(g())  # prints '(A1, A2)'
  ```

  If the operation seed is set, we get different results for every call to the
  random op, but the same sequence for every re-run of the program:

  ```python
  print(tf.random.uniform([1], seed=1))  # generates 'A1'
  print(tf.random.uniform([1], seed=1))  # generates 'A2'
  ```

  (now close the program and run it again)

  ```python
  print(tf.random.uniform([1], seed=1))  # generates 'A1'
  print(tf.random.uniform([1], seed=1))  # generates 'A2'
  ```

  The reason we get 'A2' instead of 'A1' on the second call of
  `tf.random.uniform` above is because the same `tf.random.uniform` kernel
  (i.e. internal representation) is used by TensorFlow for all calls of it with
  the same arguments, and the kernel maintains an internal counter which is
  incremented every time it is executed, generating different results.

  Calling `tf.random.set_seed` will reset any such counters:

  ```python
  tf.random.set_seed(1234)
  print(tf.random.uniform([1], seed=1))  # generates 'A1'
  print(tf.random.uniform([1], seed=1))  # generates 'A2'
  tf.random.set_seed(1234)
  print(tf.random.uniform([1], seed=1))  # generates 'A1'
  print(tf.random.uniform([1], seed=1))  # generates 'A2'
  ```

  When multiple identical random ops are wrapped in a `tf.function`, their
  behavior changes because the ops no longer share the same counter. For
  example:

  ```python
  @tf.function
  def foo():
    a = tf.random.uniform([1], seed=1)
    b = tf.random.uniform([1], seed=1)
    return a, b
  print(foo())  # prints '(A1, A1)'
  print(foo())  # prints '(A2, A2)'

  @tf.function
  def bar():
    a = tf.random.uniform([1])
    b = tf.random.uniform([1])
    return a, b
  print(bar())  # prints '(A1, A2)'
  print(bar())  # prints '(A3, A4)'
  ```

  The second call of `foo` returns '(A2, A2)' instead of '(A1, A1)' because
  `tf.random.uniform` maintains an internal counter. If you want `foo` to
  return '(A1, A1)' every time, use the stateless random ops such as
  `tf.random.stateless_uniform`. Also see `tf.random.experimental.Generator`
  for a new set of stateful random ops that use external variables to manage
  their states.

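  A minimal sketch using stateless ops (assuming the standard
  `tf.random.stateless_uniform` signature, which takes an explicit `seed` pair):

  ```python
  @tf.function
  def baz():
    a = tf.random.stateless_uniform([1], seed=[1, 2])
    b = tf.random.stateless_uniform([1], seed=[1, 2])
    return a, b
  print(baz())  # prints '(A1, A1)'
  print(baz())  # prints '(A1, A1)'
  ```
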
  Args:
    seed: integer.
  """
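  # In eager mode (the TF2 default) this sets the global seed on the context;
  # inside a graph it sets the default graph's seed (see `set_random_seed`).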
  set_random_seed(seed)