• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#     http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ==============================================================================
15"""Estimator regression tests."""
16
17from __future__ import absolute_import
18from __future__ import division
19from __future__ import print_function
20
21import random
22
23from tensorflow.contrib.framework.python.ops import variables
24from tensorflow.contrib.layers.python.layers import feature_column
25from tensorflow.contrib.learn.python.learn.datasets import base
26from tensorflow.contrib.learn.python.learn.estimators import dnn
27from tensorflow.contrib.learn.python.learn.estimators import linear
28from tensorflow.contrib.learn.python.learn.estimators import run_config
29from tensorflow.contrib.learn.python.learn.learn_io import data_feeder
30from tensorflow.python.framework import ops
31from tensorflow.python.ops import control_flow_ops
32from tensorflow.python.ops import random_ops
33from tensorflow.python.platform import test
34from tensorflow.python.training import optimizer as optimizer_lib
35
36
def _get_input_fn(x, y, batch_size=None):
  """Returns an (input_builder, feed_dict_fn) pair for regression data.

  Args:
    x: Feature data accepted by `data_feeder.setup_train_data_feeder`.
    y: Target values (continuous; `n_classes=None` requests regression).
    batch_size: Optional batch size forwarded to the data feeder.
  """
  feeder = data_feeder.setup_train_data_feeder(
      x, y, n_classes=None, batch_size=batch_size)
  return feeder.input_builder, feeder.get_feed_dict_fn()
41
42
# We use a null optimizer since we can't get deterministic results out of
# supervisor's multiple threads.
class _NullOptimizer(optimizer_lib.Optimizer):
  """Optimizer that applies no updates, so training leaves variables as
  initialized — useful for checking reproducibility of initialization."""

  def __init__(self):
    super(_NullOptimizer, self).__init__(use_locking=False, name='Null')

  def _apply_dense(self, grad, var):
    # Discard the dense gradient; emit a no-op so the train op still exists.
    return control_flow_ops.no_op()

  def _apply_sparse(self, grad, var):
    # Sparse gradients are likewise discarded.
    return control_flow_ops.no_op()

  def _prepare(self):
    # No per-step constants to precompute for a no-op update.
    pass
58
59
# Module-level instance shared by the regressor tests below.
_NULL_OPTIMIZER = _NullOptimizer()
61
62
class StabilityTest(test.TestCase):
  """Tests that estimators are reproducible.

  Each test builds the same computation twice with identical seeds and
  asserts that the two runs produce (numerically close to) identical
  results.
  """

  def testRandomStability(self):
    """Two graphs with the same graph-level seed yield identical randoms."""
    my_seed = 42
    minval = -0.3333
    maxval = 0.3333
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as session:
        g.seed = my_seed
        x = random_ops.random_uniform([10, 10], minval=minval, maxval=maxval)
        val1 = session.run(x)
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as session:
        g.seed = my_seed
        x = random_ops.random_uniform([10, 10], minval=minval, maxval=maxval)
        val2 = session.run(x)
    self.assertAllClose(val1, val2)

  def testLinearRegression(self):
    """Two identically-seeded LinearRegressors train to identical state."""
    my_seed = 42
    config = run_config.RunConfig(tf_random_seed=my_seed)
    boston = base.load_boston()
    columns = [feature_column.real_valued_column('', dimension=13)]

    # Train two regressors with identical seeds and the null optimizer (so
    # the supervisor's threading cannot introduce nondeterministic updates);
    # their variables and predictions should then match exactly.
    with ops.Graph().as_default() as g1:
      random.seed(my_seed)
      g1.seed = my_seed
      variables.create_global_step()
      regressor1 = linear.LinearRegressor(
          optimizer=_NULL_OPTIMIZER, feature_columns=columns, config=config)
      regressor1.fit(x=boston.data, y=boston.target, steps=1)

    with ops.Graph().as_default() as g2:
      random.seed(my_seed)
      g2.seed = my_seed
      variables.create_global_step()
      regressor2 = linear.LinearRegressor(
          optimizer=_NULL_OPTIMIZER, feature_columns=columns, config=config)
      regressor2.fit(x=boston.data, y=boston.target, steps=1)

    variable_names = regressor1.get_variable_names()
    # NOTE: the empty feature-column name '' produces the double slash in
    # 'linear//weight'.
    self.assertIn('linear//weight', variable_names)
    self.assertIn('linear/bias_weight', variable_names)
    regressor1_weights = regressor1.get_variable_value('linear//weight')
    regressor2_weights = regressor2.get_variable_value('linear//weight')
    regressor1_bias = regressor1.get_variable_value('linear/bias_weight')
    regressor2_bias = regressor2.get_variable_value('linear/bias_weight')
    self.assertAllClose(regressor1_weights, regressor2_weights)
    self.assertAllClose(regressor1_bias, regressor2_bias)
    self.assertAllClose(
        list(regressor1.predict_scores(
            boston.data, as_iterable=True)),
        list(regressor2.predict_scores(
            boston.data, as_iterable=True)),
        atol=1e-05)

  def testDNNRegression(self):
    """Two identically-seeded DNNRegressors train to identical state."""
    my_seed = 42
    config = run_config.RunConfig(tf_random_seed=my_seed)
    boston = base.load_boston()
    columns = [feature_column.real_valued_column('', dimension=13)]

    with ops.Graph().as_default() as g1:
      random.seed(my_seed)
      g1.seed = my_seed
      variables.create_global_step()
      regressor1 = dnn.DNNRegressor(
          hidden_units=[10],
          feature_columns=columns,
          optimizer=_NULL_OPTIMIZER,
          config=config)
      regressor1.fit(x=boston.data, y=boston.target, steps=1)

    with ops.Graph().as_default() as g2:
      random.seed(my_seed)
      g2.seed = my_seed
      variables.create_global_step()
      regressor2 = dnn.DNNRegressor(
          hidden_units=[10],
          feature_columns=columns,
          optimizer=_NULL_OPTIMIZER,
          config=config)
      regressor2.fit(x=boston.data, y=boston.target, steps=1)

    # Compare hidden-layer and output-layer weights pairwise.
    weights1 = ([regressor1.get_variable_value('dnn/hiddenlayer_0/weights')] +
                [regressor1.get_variable_value('dnn/logits/weights')])
    weights2 = ([regressor2.get_variable_value('dnn/hiddenlayer_0/weights')] +
                [regressor2.get_variable_value('dnn/logits/weights')])
    for w1, w2 in zip(weights1, weights2):
      self.assertAllClose(w1, w2)

    biases1 = ([regressor1.get_variable_value('dnn/hiddenlayer_0/biases')] +
               [regressor1.get_variable_value('dnn/logits/biases')])
    biases2 = ([regressor2.get_variable_value('dnn/hiddenlayer_0/biases')] +
               [regressor2.get_variable_value('dnn/logits/biases')])
    for b1, b2 in zip(biases1, biases2):
      self.assertAllClose(b1, b2)
    self.assertAllClose(
        list(regressor1.predict_scores(
            boston.data, as_iterable=True)),
        list(regressor2.predict_scores(
            boston.data, as_iterable=True)),
        atol=1e-05)
169
170
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
  test.main()
173