
Searched refs:end_token (Results 1 – 15 of 15) sorted by relevance

/external/tensorflow/tensorflow/contrib/seq2seq/python/kernel_tests/
beam_search_ops_test.py 39 end_token = 10
51 end_token=end_token)
58 end_token = 10
71 end_token=end_token)
81 end_token = 10
94 end_token=end_token)
102 end_token = 5
106 0, high=end_token + 1, size=(max_time, batch_size, beam_width))
114 end_token=end_token)
121 self.assertAllClose(b_value, end_token * np.ones_like(b_value))
[all …]
beam_search_decoder_test.py 70 end_token=11)
261 self.end_token = 0
297 end_token=self.end_token,
354 end_token=self.end_token,
390 self.end_token = 0
455 end_token=self.end_token,
488 end_token = vocab_size - 1
530 end_token=end_token,
602 end_token = vocab_size - 1
652 end_token=end_token,
basic_decoder_v2_test.py 136 end_token = 1
152 end_token=end_token,
194 expected_step_finished = (expected_sample_ids == end_token)
213 end_token = 1
228 end_token=end_token,
270 expected_step_finished = (sample_ids == end_token)
519 end_token = 6
530 end_fn = lambda sample_ids: math_ops.equal(sample_ids, end_token)
585 expected_step_finished = (sample_ids == end_token)
598 end_token = 6
[all …]
basic_decoder_test.py 134 end_token = 1
141 end_token)
188 expected_step_finished = (expected_sample_ids == end_token)
207 end_token = 1
217 end_token, seed=0)
264 expected_step_finished = (sample_ids == end_token)
510 end_token = 6
521 end_fn = lambda sample_ids: math_ops.equal(sample_ids, end_token)
578 expected_step_finished = (sample_ids == end_token)
591 end_token = 6
[all …]
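
In beam_search_ops_test.py above, end_token is the scalar fed to the gather_tree op, which writes it into every beam position past the end of a finished sequence. A minimal sketch of that call, assuming a TF 1.x build where tf.contrib.seq2seq is available; shapes and the end_token value are illustrative:

```python
import numpy as np
import tensorflow as tf  # TF 1.x with tf.contrib assumed

max_time, batch_size, beam_width = 4, 2, 3
end_token = 10

step_ids = np.random.randint(
    0, high=end_token + 1,
    size=(max_time, batch_size, beam_width)).astype(np.int32)
parent_ids = np.random.randint(
    0, high=beam_width,
    size=(max_time, batch_size, beam_width)).astype(np.int32)
max_sequence_lengths = np.full([batch_size], max_time, dtype=np.int32)

# gather_tree walks parent_ids backwards from the last step; once a sequence
# reaches end_token, every later position in that beam is set to end_token.
beams = tf.contrib.seq2seq.gather_tree(
    step_ids=step_ids,
    parent_ids=parent_ids,
    max_sequence_lengths=max_sequence_lengths,
    end_token=end_token)

with tf.Session() as sess:
    print(sess.run(beams))  # shape [max_time, batch_size, beam_width]
```
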
/external/tensorflow/tensorflow/contrib/seq2seq/kernels/
beam_search_ops.cc 53 const Tensor& end_token = ctx->input(3); in Compute() local
64 ctx, TensorShapeUtils::IsScalar(end_token.shape()), in Compute()
66 end_token.shape().DebugString())); in Compute()
87 typename TTypes<T>::ConstScalar end_token_t(end_token.scalar<T>()); in Compute()
111 const int32 end_token, TTypes<int32, 3>::Tensor beams) { in operator ()()
115 beams.setConstant(end_token); in operator ()()
117 auto DoWork = [&, ctx, end_token](int start_batch_beam, in operator ()()
147 beams(time, batch, beam) = end_token; in operator ()()
148 } else if (beams(time, batch, beam) == end_token) { in operator ()()
176 TTypes<int32>::ConstVec max_sequence_lengths, const T end_token, \
beam_search_ops_gpu.cu.cc 33 const T end_token, T* beams) { in GatherTreeOpKernel() argument
70 beams[level_beam_ix] = end_token; in GatherTreeOpKernel()
71 } else if (beams[level_beam_ix] == end_token) { in GatherTreeOpKernel()
86 const T end_token, typename TTypes<T, 3>::Tensor beams) { in operator ()()
91 beams.device(d) = beams.constant(end_token); in operator ()()
97 parent_ids.data(), max_sequence_length.data(), end_token, in operator ()()
beam_search_ops.h 34 const T end_token, typename TTypes<T, 3>::Tensor beams);
/external/tensorflow/tensorflow/contrib/seq2seq/python/ops/
beam_search_decoder.py 158 end_token=beam_width + 1)
360 end_token=self._end_token)
550 end_token = self._end_token
576 end_token=end_token,
635 end_token, argument
690 end_token, dtype=dtypes.int32, name="end_token")
846 end_token, argument
874 end_token, dtype=dtypes.int32, name="end_token")
927 def call(self, embeddning, start_tokens, end_token, initial_state, **kwargs): argument
930 init_kwargs["end_token"] = end_token
[all …]
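
In beam_search_decoder.py, end_token is stored on the decoder (self._end_token), compared against predicted ids to mark beams finished, and forwarded to gather_tree when beams are finalized. A minimal construction sketch, assuming TF 1.x contrib; vocabulary size, beam width and token ids are illustrative:

```python
import tensorflow as tf  # TF 1.x with tf.contrib assumed

vocab_size, embed_dim, num_units = 20, 8, 16
batch_size, beam_width = 2, 3
start_token, end_token = 1, vocab_size - 1  # mirrors "end_token = vocab_size - 1" in the tests

embedding = tf.get_variable("embedding", [vocab_size, embed_dim])
cell = tf.nn.rnn_cell.BasicLSTMCell(num_units)

# For a fresh state, zero_state over batch_size * beam_width is enough; a real
# encoder state would be tiled with tf.contrib.seq2seq.tile_batch instead.
initial_state = cell.zero_state(batch_size * beam_width, tf.float32)

decoder = tf.contrib.seq2seq.BeamSearchDecoder(
    cell=cell,
    embedding=embedding,
    start_tokens=tf.fill([batch_size], start_token),
    end_token=end_token,  # scalar int32; beams that emit it are marked finished
    initial_state=initial_state,
    beam_width=beam_width,
    output_layer=tf.layers.Dense(vocab_size))

outputs, final_state, lengths = tf.contrib.seq2seq.dynamic_decode(
    decoder, maximum_iterations=10)
```
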
sampler.py 530 def initialize(self, embedding, start_tokens=None, end_token=None): argument
553 self.end_token = ops.convert_to_tensor(
554 end_token, dtype=dtypes.int32, name="end_token")
558 if self.end_token.get_shape().ndims != 0:
578 finished = math_ops.equal(sample_ids, self.end_token)
helper.py 561 def __init__(self, embedding, start_tokens, end_token): argument
584 end_token, dtype=dtypes.int32, name="end_token")
638 def __init__(self, embedding, start_tokens, end_token, argument
660 embedding, start_tokens, end_token)
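
In sampler.py and helper.py, end_token is converted to a scalar int32 tensor and compared against sample_ids to decide which sequences are finished (finished = math_ops.equal(sample_ids, self.end_token)). A minimal greedy-decoding sketch against the same GreedyEmbeddingHelper signature, assuming TF 1.x contrib with illustrative sizes:

```python
import tensorflow as tf  # TF 1.x with tf.contrib assumed

vocab_size, embed_dim, num_units, batch_size = 20, 8, 16, 2
start_token, end_token = 1, 2

embedding = tf.get_variable("embedding", [vocab_size, embed_dim])
cell = tf.nn.rnn_cell.BasicLSTMCell(num_units)

helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(
    embedding=embedding,
    start_tokens=tf.fill([batch_size], start_token),
    end_token=end_token)  # scalar int32; a sequence is finished once it samples this id

decoder = tf.contrib.seq2seq.BasicDecoder(
    cell=cell,
    helper=helper,
    initial_state=cell.zero_state(batch_size, tf.float32),
    output_layer=tf.layers.Dense(vocab_size))

# dynamic_decode keeps stepping until every sequence has emitted end_token
# (or maximum_iterations is reached).
outputs, final_state, lengths = tf.contrib.seq2seq.dynamic_decode(
    decoder, maximum_iterations=10)
```
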
/external/tensorflow/tensorflow/contrib/seq2seq/ops/
beam_search_ops.cc 33 ShapeHandle step_ids, parent_ids, max_sequence_lengths, end_token; in __anon559769010102() local
41 TF_RETURN_IF_ERROR(c->WithRank(c->input(3), 0, &end_token)); in __anon559769010102()
/external/protobuf/python/google/protobuf/
text_format.py 580 end_token = '>'
583 end_token = '}'
600 while not tokenizer.TryConsume(end_token):
602 raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
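
Here end_token is simply the closing delimiter the protobuf text-format parser expects for a nested message: '>' when the field was opened with '<', '}' when it was opened with '{'. A small sketch, assuming the standard google.protobuf Python package and using FileDescriptorProto purely as a convenient message type:

```python
from google.protobuf import descriptor_pb2
from google.protobuf import text_format

# Nested messages may be delimited with either braces or angle brackets; the
# parser picks end_token ('}' or '>') to match the opening character.
braces = text_format.Parse('message_type { name: "Foo" }',
                           descriptor_pb2.FileDescriptorProto())
angles = text_format.Parse('message_type < name: "Foo" >',
                           descriptor_pb2.FileDescriptorProto())
assert braces == angles
```
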
/external/libtextclassifier/annotator/
feature-processor.cc 341 TokenIndex end_token = kInvalidIndex; in CodepointSpanToTokenSpan() local
355 end_token = i + 1; in CodepointSpanToTokenSpan()
358 return {start_token, end_token}; in CodepointSpanToTokenSpan()
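
In feature-processor.cc, end_token is an exclusive token index, initialised to kInvalidIndex and set to i + 1 once token i falls inside the codepoint span. A rough Python rendering of that idea (the function name, token representation and any snapping behaviour are assumptions, not the library's API):

```python
kInvalidIndex = -1

def codepoint_span_to_token_span(token_spans, span):
    # token_spans: list of (start_codepoint, end_codepoint) per token.
    start_token = kInvalidIndex
    end_token = kInvalidIndex
    for i, (begin, end) in enumerate(token_spans):
        # Only tokens completely inside the codepoint span count.
        if begin >= span[0] and end <= span[1]:
            if start_token == kInvalidIndex:
                start_token = i
            end_token = i + 1  # exclusive end, as in the C++ above
    return start_token, end_token

# Tokens "hello", "world" at codepoints (0, 5) and (6, 11); span covers both.
print(codepoint_span_to_token_span([(0, 5), (6, 11)], (0, 11)))  # (0, 2)
```
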
/external/v8/src/parsing/
parser-base.h 1225 V8_INLINE void ParseStatementList(StatementListT body, Token::Value end_token, in ParseStatementList() argument
1227 LazyParsingResult result = ParseStatementList(body, end_token, false, ok); in ParseStatementList()
1232 Token::Value end_token,
4865 Token::Value end_token, bool may_abort, in ParseStatementList() argument
4880 while (peek() != end_token) { in ParseStatementList()
/external/tensorflow/tensorflow/compiler/xla/service/
hlo_parser.cc 3020 const auto end_token = in ParseWindow() local
3022 while (lexer_.GetKind() != end_token) { in ParseWindow()