/external/python/cpython2/Lib/email/ |
D | feedparser.py |
    164  self._input = BufferedSubFile()
    177  self._input.push(data)
    188  self._input.close()
    222  for line in self._input:
    231  self._input.unreadline(line)
    243  line = self._input.readline()
    259  self._input.push_eof_matcher(NLCRE.match)
    269  self._input.pop_eof_matcher()
    275  line = self._input.readline()
    281  line = self._input.readline()
    [all …]
|
/external/python/cpython3/Lib/email/ |
D | feedparser.py |
    162  self._input = BufferedSubFile()
    175  self._input.push(data)
    186  self._input.close()
    224  for line in self._input:
    235  self._input.unreadline(line)
    247  line = self._input.readline()
    263  self._input.push_eof_matcher(NLCRE.match)
    273  self._input.pop_eof_matcher()
    279  line = self._input.readline()
    285  line = self._input.readline()
    [all …]
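Note: the feedparser hits above (both cpython2 and cpython3) all go through the internal
BufferedSubFile push parser stored as self._input. As a rough illustration of the public
API that ends up driving those calls (the header and body strings here are made up for the
example), a minimal Python sketch:

    # feed() pushes raw text onto the internal _input buffer (self._input.push(data));
    # close() flushes it (self._input.close()) and returns the parsed Message.
    from email.feedparser import FeedParser

    parser = FeedParser()
    parser.feed("Subject: hello\r\n")
    parser.feed("\r\nbody text\r\n")
    msg = parser.close()
    print(msg["Subject"])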
|
/external/libxcam/modules/isp/ |
D | aiq_handler.cpp |
    344  xcam_mem_clear (_input);  in AiqAeHandler()
    345  _input.num_exposures = 1;  in AiqAeHandler()
    346  _input.frame_use = _aiq_compositor->get_frame_use();  in AiqAeHandler()
    347  _input.flash_mode = ia_aiq_flash_mode_off;  in AiqAeHandler()
    348  _input.operation_mode = ia_aiq_ae_operation_mode_automatic;  in AiqAeHandler()
    349  _input.metering_mode = ia_aiq_ae_metering_mode_evaluative;  in AiqAeHandler()
    350  _input.priority_mode = ia_aiq_ae_priority_mode_normal;  in AiqAeHandler()
    351  _input.flicker_reduction_mode = ia_aiq_ae_flicker_reduction_auto;  in AiqAeHandler()
    352  _input.sensor_descriptor = NULL;  in AiqAeHandler()
    353  _input.exposure_window = NULL;  in AiqAeHandler()
    [all …]
|
D | aiq_handler.h |
    150  ia_aiq_ae_input_params _input;  variable
    188  ia_aiq_awb_input_params _input;  variable
|
/external/tensorflow/tensorflow/python/kernel_tests/ |
D | segment_reduction_ops_test.py |
    38   def _input(self, input_shape, dtype=dtypes_lib.int32):  member in SegmentReductionHelper
    118  tf_x, np_x = self._input(shape, dtype=dtype)
    133  tf_x, _ = self._input(shape)
    143  tf_x, _ = self._input(shape)
    155  tf_x, _ = self._input(shape, dtype=dtypes_lib.float32)
    164  tf_x, np_x = self._input(shape, dtype=dtypes_lib.float32)
    175  tf_x, np_x = self._input(shape, dtype=dtypes_lib.float32)
    186  tf_x, _ = self._input(shape)
    198  tf_x, _ = self._input(shape)
    208  tf_x, _ = self._input(shape)
    [all …]
|
/external/antlr/runtime/CSharp2/Sources/Antlr3.Runtime/Antlr.Runtime/ |
D | RecognitionException.cs |
    77   private IIntStream _input;  field in Antlr.Runtime.RecognitionException
    145  this._input = input;  in RecognitionException()
    180  if (_input is ITokenStream) {
    184  ITreeNodeStream treeNodeStream = _input as ITreeNodeStream;
    205  return _input;
    208  _input = value;
|
/external/antlr/runtime/CSharp3/Sources/Antlr3.Runtime/ |
D | RecognitionException.cs |
    79   private IIntStream _input;  field in Antlr.Runtime.RecognitionException
    173  this._input = input;  in RecognitionException()
    236  if ( _input is ITokenStream )
    241  ITreeNodeStream treeNodeStream = _input as ITreeNodeStream;
    268  return _input;
    272  _input = value;
|
/external/libxcam/modules/ocl/ |
D | cl_geo_map_handler.cpp |
    259  …_input[NV12PlaneYIdx] = convert_to_climage (context, input, cl_desc, in_info.offsets[NV12PlaneYIdx…  in prepare_parameters()
    266  …_input[NV12PlaneUVIdx] = convert_to_climage (context, input, cl_desc, in_info.offsets[NV12PlaneUVI…  in prepare_parameters()
    279  _input[NV12PlaneYIdx].ptr () && _input[NV12PlaneYIdx]->is_valid () &&  in prepare_parameters()
    280  _input[NV12PlaneUVIdx].ptr () && _input[NV12PlaneUVIdx]->is_valid () &&  in prepare_parameters()
    313  _input[i].release ();  in execute_done()
|
D | cl_fisheye_handler.cpp |
    249  …_input[NV12PlaneYIdx] = convert_to_climage (context, input, cl_desc, in_info.offsets[NV12PlaneYIdx…  in prepare_parameters()
    256  …_input[NV12PlaneUVIdx] = convert_to_climage (context, input, cl_desc, in_info.offsets[NV12PlaneUVI…  in prepare_parameters()
    281  _input[NV12PlaneYIdx].ptr () && _input[NV12PlaneYIdx]->is_valid () &&  in prepare_parameters()
    282  _input[NV12PlaneUVIdx].ptr () && _input[NV12PlaneUVIdx]->is_valid () &&  in prepare_parameters()
    534  _input[i].release ();  in execute_done()
|
D | cl_geo_map_handler.h |
    103  return _input [index];  in get_geo_input_image()
    144  SmartPtr<CLImage> _input[NV12PlaneMax];  variable
|
D | cl_fisheye_handler.h |
    112  return _input [index];  in get_input_image()
    153  SmartPtr<CLImage> _input[NV12PlaneMax];  variable
|
/external/v8/gnparser/ |
D | gnParser.py |
    217  la_ = self._interp.adaptivePredict(self._input,0,self._ctx)
    283  la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
    406  _la = self._input.LA(1)
    416  la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
    488  _la = self._input.LA(1)
    494  token = self._input.LA(1)
    603  _la = self._input.LA(1)
    607  token = self._input.LA(1)
    621  _la = self._input.LA(1)
    770  self._ctx.stop = self._input.LT(-1)
    [all …]
|
/external/antlr/runtime/ActionScript/project/src/org/antlr/runtime/ |
D | CommonToken.as |
    37   protected var _input:CharStream;
    61   token._input = input;
    93   if ( _input==null ) {
    96   _text = _input.substring(_start, _stop);
    158  return _input;
    162  _input = input;
|
/external/bcc/examples/usdt_sample/usdt_sample_lib1/include/usdt_sample_lib1/ |
D | lib1.h |
    16   const std::string& input() const { return _input; }  in input()
    19   std::string _input;
|
/external/parameter-framework/upstream/utility/ |
D | Tokenizer.cpp |
    38   : _input(input), _delimiters(delimiters), _mergeDelimiters(mergeDelimiters)  in Tokenizer()
    48   for (const auto character : _input) {  in split()
|
D | Tokenizer.h | 66 const std::string _input; //< string to be tokenized
|
/external/tensorflow/tensorflow/python/debug/cli/ |
D | readline_ui.py |
    44   self._input = six.moves.input
    82   return self._input(self.CLI_PROMPT).strip()
|
/external/toolchain-utils/automation/common/ |
D | command.py |
    146  self._input = kwargs.get('input', None)
    156  if self._input:
    157  pipe.insert(str(Shell('cat', self._input), 0))
|
/external/walt/ios/WALT/ |
D | MIDIClient.m |
    164  MIDIPortRef _input;
    191  if (!_input) { // Lazily create the input port.
    196  &_input);
    206  result = MIDIPortConnectSource(_input, source.endpoint, (__bridge void *)self);
|
/external/python/cpython3/Lib/ |
D | subprocess.py |
    672   self._input = None
    1665  if self._input:
    1666  input_view = memoryview(self._input)
    1697  if self._input_offset >= len(self._input):
    1734  if self.stdin and self._input is None:
    1736  self._input = input
    1738  self._input = self._input.encode(self.stdin.encoding,
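Note: the subprocess.py hits above are the Popen.communicate() path, where the data passed
as input= is stashed on self._input, encoded with the stdin encoding in text mode, and
written to the child in chunks via a memoryview. A minimal sketch of that public entry
point (assuming a POSIX-like environment where "cat" is available):

    # communicate(input=...) is what ends up populating self._input internally.
    import subprocess

    proc = subprocess.Popen(["cat"], stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, text=True)
    out, _ = proc.communicate(input="hello\n")
    print(out)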
|
/external/tensorflow/tensorflow/core/kernels/hexagon/ |
D | graph_transferer_test.cc |
    104  auto _input = ops::AsNodeOut(scope, input);  in BuildConv2DOps() local
    111  .Input(_input)  in BuildConv2DOps()
    128  auto _input = ops::AsNodeOut(scope, input);  in BuildMaxPoolOps() local
    133  .Input(_input)  in BuildMaxPoolOps()
|
/external/bcc/examples/usdt_sample/usdt_sample_lib1/src/ |
D | lib1.cpp | 16 : _input(input_) in OperationRequest()
|
/external/tensorflow/tensorflow/python/keras/utils/ |
D | tf_utils_test.py | 89 self._input = input_
|
/external/doclava/src/com/google/doclava/parser/ |
D | JavaParser.java |
    16035  public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {  in specialStateTransition() argument
    16036  TokenStream input = (TokenStream)_input;  in specialStateTransition()
    16130  public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {  in specialStateTransition() argument
    16131  TokenStream input = (TokenStream)_input;  in specialStateTransition()
    16459  public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {  in specialStateTransition() argument
    16460  TokenStream input = (TokenStream)_input;  in specialStateTransition()
    16718  public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {  in specialStateTransition() argument
    16719  TokenStream input = (TokenStream)_input;  in specialStateTransition()
    16986  public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {  in specialStateTransition() argument
    16987  TokenStream input = (TokenStream)_input;  in specialStateTransition()
    [all …]
|
/external/tensorflow/tensorflow/python/training/ |
D | training.py | 52 from tensorflow.python.training import input as _input unknown
|