
Searched refs:tokenize (Results 1 – 25 of 65) sorted by relevance


/external/chromium_org/third_party/sqlite/src/test/
tokenize.test
14 # $Id: tokenize.test,v 1.1 2008/07/08 00:06:51 drh Exp $
20 do_test tokenize-1.1 {
23 do_test tokenize-1.2 {
26 do_test tokenize-1.3 {
29 do_test tokenize-1.4 {
32 do_test tokenize-1.5 {
35 do_test tokenize-1.6 {
38 do_test tokenize-1.7 {
41 do_test tokenize-1.8 {
44 do_test tokenize-1.9 {
[all …]
fts3ad.test
29 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize porter);
67 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize porter);
75 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize= porter);
83 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize= simple);
91 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize= porter);
99 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize = porter);
fts3aa.test
218 CREATE VIRTUAL TABLE t3 USING fts3(tokenize=simple, tokenize=simple);
219 SELECT tokenize FROM t3;
222 CREATE VIRTUAL TABLE t4 USING fts4(tokenize=simple, tokenize=simple);
223 } {1 {unrecognized parameter: tokenize=simple}}
fts2l.test
34 execsql "CREATE VIRTUAL TABLE t2 USING fts2(content, tokenize \x80)"
41 execsql "CREATE VIRTUAL TABLE t3 USING fts2(content, tokenize\x80)"
fts1k.test
34 execsql "CREATE VIRTUAL TABLE t2 USING fts1(content, tokenize \x80)"
41 execsql "CREATE VIRTUAL TABLE t3 USING fts1(content, tokenize\x80)"
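The fts1/fts2/fts3 tests above exercise the tokenize option of SQLite's full-text-search virtual tables: fts3ad.test builds tables with the porter stemmer, fts3aa.test shows fts4 rejecting a duplicate tokenize=simple parameter that fts3 quietly treats as a column, and fts1k/fts2l feed the option invalid bytes. A minimal Python sketch of the same option, assuming the linked SQLite library was built with FTS3/FTS4 enabled; the table and data below are illustrative only:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    try:
        # Same shape as the fts3ad.test statements: one content column,
        # tokenized with the porter stemmer instead of the default "simple".
        conn.execute("CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize=porter)")
        conn.execute("INSERT INTO t1(content) VALUES ('running runner ran')")
        # The porter tokenizer stems both documents and queries,
        # so the term 'run' matches 'running'.
        print(conn.execute("SELECT content FROM t1 WHERE t1 MATCH 'run'").fetchall())
    except sqlite3.OperationalError as exc:
        # Raised when the linked SQLite was compiled without FTS3/FTS4.
        print("FTS3 not available:", exc)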
/external/chromium/testing/gmock/scripts/generator/cpp/
ast.py
46 from cpp import tokenize
549 if parts[-1].token_type == tokenize.NAME:
579 if (type_name and type_name[-1].token_type == tokenize.NAME and
580 p.token_type == tokenize.NAME):
581 type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
738 if token.token_type == tokenize.NAME:
749 if next.token_type == tokenize.SYNTAX and next.name == '(':
754 syntax = tokenize.SYNTAX
763 new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
766 last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
[all …]
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/cm/
xml.js
52 state.tokenize = parser;
72 state.tokenize = inBlock("meta", "?>");
81 state.tokenize = inTag;
105 state.tokenize = inText;
114 state.tokenize = inAttribute(ch);
116 return state.tokenize(stream, state);
127 state.tokenize = inTag;
141 state.tokenize = inText;
154 state.tokenize = doctype(depth + 1);
155 return state.tokenize(stream, state);
[all …]
css.js
31 state.tokenize = tokenString(ch);
32 return state.tokenize(stream, state);
68 state.tokenize = tokenParenthesized;
87 state.tokenize = tokenBase;
96 state.tokenize = tokenString(")", true);
98 state.tokenize = tokenBase;
104 return {tokenize: tokenBase,
158 state.tokenize = state.tokenize || tokenBase;
159 if (state.tokenize == tokenBase && stream.eatSpace()) return null;
160 var style = state.tokenize(stream, state);
[all …]
coffeescript.js
72 state.tokenize = longComment;
73 return state.tokenize(stream, state);
124 state.tokenize = tokenFactory(stream.current(), 'string');
125 return state.tokenize(stream, state);
130 state.tokenize = tokenFactory(stream.current(), 'string-2');
131 return state.tokenize(stream, state);
182 state.tokenize = tokenBase;
192 state.tokenize = tokenBase;
203 state.tokenize = tokenBase;
255 var style = state.tokenize(stream, state);
[all …]
clike.js
22 state.tokenize = tokenString(ch);
23 return state.tokenize(stream, state);
35 state.tokenize = tokenComment;
69 state.tokenize = null;
78 state.tokenize = null;
111 tokenize: null,
127 var style = (state.tokenize || tokenBase)(stream, state);
148 if (state.tokenize != tokenBase && state.tokenize != null) return CodeMirror.Pass;
181 state.tokenize = cppHook;
186 state.tokenize = null;
[all …]
shell.js
32 return tokenize(stream, state);
44 return tokenize(stream, state);
95 return tokenize(stream, state);
105 function tokenize(stream, state) { function
113 return tokenize(stream, state);
python.js
122 state.tokenize = tokenStringFactory(stream.current());
123 return state.tokenize(stream, state);
172 state.tokenize = tokenBase;
182 state.tokenize = tokenBase;
249 var style = state.tokenize(stream, state);
308 tokenize: tokenBase,
329 if (state.tokenize != tokenBase) {
330 return state.tokenize.isString ? CodeMirror.Pass : 0;
php.js
9 if (stream.match(delim)) state.tokenize = null;
34 state.tokenize = heredoc(stream.current().slice(3));
35 return state.tokenize(stream, state);
javascript.js
59 state.tokenize = f;
133 state.tokenize = jsTokenBase;
142 state.tokenize = jsTokenBase;
409 tokenize: jsTokenBase,
426 if (state.tokenize != jsTokenComment && stream.eatSpace()) return null;
427 var style = state.tokenize(stream, state);
434 if (state.tokenize == jsTokenComment) return CodeMirror.Pass;
435 if (state.tokenize != jsTokenBase) return 0;
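Every CodeMirror mode above (xml.js, css.js, coffeescript.js, clike.js, shell.js, python.js, php.js, javascript.js) uses the same idiom: the active tokenizer function is kept in state.tokenize and swapped out when the mode enters a string, comment, or heredoc, then restored on the way out. A rough Python sketch of that idiom, not CodeMirror's actual API; the stream handling below is simplified to a list of characters:

    def token_base(stream, state):
        ch = stream.pop(0)
        if ch in "'\"":
            # Entering a string: swap the active tokenizer, like
            # `state.tokenize = tokenString(ch)` in css.js / clike.js.
            state.tokenize = make_token_string(ch)
            return "string"
        return "word" if ch.isalnum() else "punct"

    def make_token_string(quote):
        def token_string(stream, state):
            while stream:
                if stream.pop(0) == quote:
                    # Closing quote: restore the base tokenizer.
                    state.tokenize = token_base
                    break
            return "string"
        return token_string

    class State:
        # Mirrors the `tokenize:` slot in the modes' startState objects.
        def __init__(self):
            self.tokenize = token_base

    def run(text):
        stream, state, styles = list(text), State(), []
        while stream:
            # Each step dispatches through whatever state.tokenize currently is.
            styles.append(state.tokenize(stream, state))
        return styles

    print(run("a 'b c' d"))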
/external/chromium_org/third_party/libjingle/source/talk/base/
stringencode_unittest.cc
236 EXPECT_EQ(5ul, tokenize("one two three four five", ' ', &fields)); in TEST()
238 EXPECT_EQ(1ul, tokenize("one", ' ', &fields)); in TEST()
242 EXPECT_EQ(5ul, tokenize(" one two three four five ", ' ', &fields)); in TEST()
244 EXPECT_EQ(1ul, tokenize(" one ", ' ', &fields)); in TEST()
246 EXPECT_EQ(0ul, tokenize(" ", ' ', &fields)); in TEST()
253 tokenize("find middle one", ' ', &fields); in TEST()
259 tokenize(" find middle one ", ' ', &fields); in TEST()
263 tokenize(" ", ' ', &fields); in TEST()
283 ASSERT_EQ(0ul, tokenize("D \"A B", ' ', '(', ')', NULL)); in TEST()
286 tokenize("A B C", ' ', '"', '"', &fields); in TEST()
[all …]
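The stringencode_unittest.cc expectations above characterise talk/base's tokenize() helper: empty fields created by leading, trailing, or repeated delimiters are dropped, and an overload takes start/end marker characters so that marked spans stay together in one field. A rough Python approximation reconstructed only from the counts asserted above; the marker handling beyond those assertions is guesswork:

    def tokenize(source, delimiter):
        # Split on a single-character delimiter and drop empty fields,
        # matching EXPECT_EQ(5ul, tokenize("one two three four five", ' ', ...)).
        return [field for field in source.split(delimiter) if field]

    def tokenize_with_marks(source, delimiter, start_mark, end_mark):
        # Keep characters between the marks in a single field.
        fields, buf, in_marks = [], "", False
        for ch in source:
            if not in_marks and ch == start_mark:
                in_marks = True
            elif in_marks and ch == end_mark:
                in_marks = False
            elif not in_marks and ch == delimiter:
                if buf:
                    fields.append(buf)
                    buf = ""
            else:
                buf += ch
        if buf:
            fields.append(buf)
        return fields

    assert len(tokenize("one two three four five", " ")) == 5
    assert len(tokenize("  one  ", " ")) == 1
    assert tokenize("   ", " ") == []
    assert tokenize_with_marks('A "B C" D', " ", '"', '"') == ["A", "B C", "D"]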
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/
pep8.py
102 import tokenize
370 if (token_type == tokenize.OP and
373 prev_type == tokenize.NAME and
443 prev_type = tokenize.OP
446 if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN):
457 elif token_type == tokenize.OP:
464 if ((prev_type != tokenize.OP or prev_text in '}])') and not
465 (prev_type == tokenize.NAME and iskeyword(prev_text))):
552 if token_type == tokenize.NL:
554 if token_type == tokenize.COMMENT:
[all …]
/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/coverage/
phystokens.py
3 import keyword, re, token, tokenize
38 if last_ttype == tokenize.COMMENT:
77 ws_tokens = [token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL]
81 tokgen = tokenize.generate_tokens(StringIO(source).readline)
98 tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
backward.py
76 import tokenize
78 open_source = tokenize.open # pylint: disable=E1101
81 detect_encoding = tokenize.detect_encoding # pylint: disable=E1101
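pep8.py and coverage's phystokens.py above both drive Python's standard tokenize module: pep8 branches on token types such as tokenize.OP, tokenize.NL and tokenize.COMMENT, while phystokens walks tokenize.generate_tokens() and maps type codes through tokenize.tok_name; backward.py merely probes for tokenize.open and tokenize.detect_encoding, which only newer (Python 3) interpreters provide. A self-contained sketch of that stdlib usage:

    import tokenize
    from io import StringIO

    source = "x = 1  # set x\nprint(x)\n"

    for ttype, text, start, _end, _line in tokenize.generate_tokens(StringIO(source).readline):
        if ttype in (tokenize.NL, tokenize.NEWLINE):
            continue  # pep8.py likewise skips pure newline tokens
        # phystokens.py maps the numeric type through tok_name the same way.
        print(start, tokenize.tok_name.get(ttype, "xx").lower(), repr(text))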
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/
DOMSyntaxHighlighter.js
77 var tokenize = WebInspector.CodeMirrorUtils.createTokenizer(this._mimeType);
81 tokenize(line, processToken.bind(this));
CodeMirrorUtils.js
40 function tokenize(line, callback) function
50 return tokenize;
CSSFormatter.js
48 var tokenize = WebInspector.CodeMirrorUtils.createTokenizer("text/css");
53 tokenize(line, this._tokenCallback.bind(this, i));
/external/chromium_org/remoting/webapp/
client_plugin_async.js
118 var tokenize = function(str) { function
141 /** @type {Array.<string>} */ tokenize(message.data['apiFeatures']);
152 requestedCapabilities = tokenize(message.data['requestedCapabilities']);
162 supportedCapabilities = tokenize(message.data['supportedCapabilities']);
294 var capabilities = tokenize(message.data['capabilities']);
/external/sqlite/android/
sqlite3_android.cpp
258 static void tokenize(sqlite3_context * context, int argc, sqlite3_value ** argv) in tokenize() function
449 …err = sqlite3_create_function(handle, "_TOKENIZE", 4, SQLITE_UTF16, collator, tokenize, NULL, NULL… in register_localized_collators()
453 …err = sqlite3_create_function(handle, "_TOKENIZE", 5, SQLITE_UTF16, collator, tokenize, NULL, NULL… in register_localized_collators()
457 …err = sqlite3_create_function(handle, "_TOKENIZE", 6, SQLITE_UTF16, collator, tokenize, NULL, NULL… in register_localized_collators()
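sqlite3_android.cpp registers its C-level tokenize() callback as a custom SQL function named _TOKENIZE via sqlite3_create_function, in 4-, 5- and 6-argument variants. A loose Python analogue of registering a custom scalar function on a connection; the function body here is invented purely for illustration and is unrelated to what the real _TOKENIZE computes:

    import sqlite3

    def tokenize_count(text, delimiter):
        # Toy scalar function: number of non-empty fields in `text`.
        return len([field for field in text.split(delimiter) if field])

    conn = sqlite3.connect(":memory:")
    # Python's counterpart of sqlite3_create_function(handle, "_TOKENIZE", nargs, ...).
    conn.create_function("_TOKENIZE", 2, tokenize_count)
    print(conn.execute("SELECT _TOKENIZE('one two three', ' ')").fetchone())  # (3,)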
/external/apache-xml/src/main/java/org/apache/xpath/compiler/
Lexer.java
96 void tokenize(String pat) throws javax.xml.transform.TransformerException in tokenize() method in Lexer
98 tokenize(pat, null); in tokenize()
109 void tokenize(String pat, Vector targetStrings) in tokenize() method in Lexer
/external/chromium_org/third_party/WebKit/Source/core/html/parser/
CSSPreloadScanner.cpp
59 tokenize(*it, source); in scanCommon()
79 inline void CSSPreloadScanner::tokenize(UChar c, const SegmentedString& source) in tokenize() function in WebCore::CSSPreloadScanner
