/third_party/node/deps/acorn/acorn/dist/ |
D | acorn.d.ts |
    48: type: TokenType;
    108: class TokenType {
    118: updateContext?: (prevType: TokenType) => void
    123: num: TokenType
    124: regexp: TokenType
    125: string: TokenType
    126: name: TokenType
    127: privateId: TokenType
    128: eof: TokenType
    129: bracketL: TokenType
    [all …]
|
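The acorn declarations above model each token kind as a singleton instance of a TokenType class rather than as an enum member. A minimal TypeScript sketch of that pattern, assuming only the field names visible in the listing (the labels, constructor, and the sample token are invented for illustration):

    // Minimal sketch of class-based token types; not acorn's real definition.
    class TokenType {
      // Optional hook seen in acorn.d.ts; what it does is up to each token kind.
      updateContext?: (prevType: TokenType) => void;
      constructor(readonly label: string) {}
    }

    // Singleton instances, one per token kind (names taken from the listing).
    const tokTypes = {
      num: new TokenType("num"),
      regexp: new TokenType("regexp"),
      string: new TokenType("string"),
      name: new TokenType("name"),
      privateId: new TokenType("privateId"),
      eof: new TokenType("eof"),
      bracketL: new TokenType("["),
    };

    // Token kinds are compared by identity, not by string or numeric value.
    declare const current: { type: TokenType };   // hypothetical current token
    const atEOF = current.type === tokTypes.eof;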
/third_party/protobuf/csharp/src/Google.Protobuf/ |
D | JsonToken.cs |
    40: private static readonly JsonToken _true = new JsonToken(TokenType.True);
    41: private static readonly JsonToken _false = new JsonToken(TokenType.False);
    42: private static readonly JsonToken _null = new JsonToken(TokenType.Null);
    43: private static readonly JsonToken startObject = new JsonToken(TokenType.StartObject);
    44: private static readonly JsonToken endObject = new JsonToken(TokenType.EndObject);
    45: private static readonly JsonToken startArray = new JsonToken(TokenType.StartArray);
    46: private static readonly JsonToken endArray = new JsonToken(TokenType.EndArray);
    47: private static readonly JsonToken endDocument = new JsonToken(TokenType.EndDocument);
    60: return new JsonToken(TokenType.Name, stringValue: name);  in Name()
    65: return new JsonToken(TokenType.StringValue, stringValue: value);  in Value()
    [all …]
|
D | JsonParser.cs |
    168: if (token.Type != JsonToken.TokenType.StartObject)  in Merge()
    181: if (token.Type == JsonToken.TokenType.EndObject)  in Merge()
    185: if (token.Type != JsonToken.TokenType.Name)  in Merge()
    223: if (token.Type == JsonToken.TokenType.Null)  in MergeField()
    256: if (token.Type != JsonToken.TokenType.StartArray)  in MergeRepeatedField()
    265: if (token.Type == JsonToken.TokenType.EndArray)  in MergeRepeatedField()
    283: if (token.Type != JsonToken.TokenType.StartObject)  in MergeMapField()
    300: if (token.Type == JsonToken.TokenType.EndObject)  in MergeMapField()
    329: if (token.Type == JsonToken.TokenType.Null)  in ParseSingleValue()
    366: case JsonToken.TokenType.True:  in ParseSingleValue()
    [all …]
|
D | JsonTokenizer.cs |
    91: if (token.Type == JsonToken.TokenType.StartObject)  in PushBack()
    95: else if (token.Type == JsonToken.TokenType.EndObject)  in PushBack()
    121: if (tokenToReturn.Type == JsonToken.TokenType.StartObject)  in Next()
    125: else if (tokenToReturn.Type == JsonToken.TokenType.EndObject)  in Next()
    156: case JsonToken.TokenType.EndArray:  in SkipValue()
    157: case JsonToken.TokenType.EndObject:  in SkipValue()
    160: case JsonToken.TokenType.StartArray:  in SkipValue()
    161: case JsonToken.TokenType.StartObject:  in SkipValue()
|
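JsonToken.cs pairs a nested TokenType enum with cached singleton tokens for payload-free kinds and factory methods for kinds that carry a string, while JsonParser.cs and JsonTokenizer.cs branch on token.Type (for example in Merge(), PushBack(), and Next()). A rough TypeScript transcription of that pattern, purely for illustration (the real code is C# and has more members than shown here):

    enum TokenType { True, False, Null, StartObject, EndObject, StartArray, EndArray, EndDocument, Name, StringValue }

    class JsonToken {
      private constructor(readonly type: TokenType, readonly stringValue?: string) {}

      // Payload-free kinds are shared singletons.
      static readonly True = new JsonToken(TokenType.True);
      static readonly StartObject = new JsonToken(TokenType.StartObject);
      static readonly EndObject = new JsonToken(TokenType.EndObject);

      // Kinds that carry a string are built per call.
      static forName(name: string): JsonToken { return new JsonToken(TokenType.Name, name); }
      static forValue(value: string): JsonToken { return new JsonToken(TokenType.StringValue, value); }
    }

    // Consumer side: branch on token.type, as the parser and tokenizer above do.
    function depthDelta(token: JsonToken): number {
      if (token.type === TokenType.StartObject) return +1;
      if (token.type === TokenType.EndObject) return -1;
      return 0;
    }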
/third_party/typescript/src/services/ |
D | classifier2020.ts |
    9: export const enum TokenType {  enum
    99: … if (typeIdx === TokenType.parameter && isRightSideOfQualifiedNameOrPropertyAccess(node)) {
    100: typeIdx = TokenType.property;
    115: if (typeIdx !== TokenType.class && typeIdx !== TokenType.interface) {
    120: …if ((typeIdx === TokenType.variable || typeIdx === TokenType.function) && isLocalDeclaration(decl,…
    146: function classifySymbol(symbol: Symbol, meaning: SemanticMeaning): TokenType | undefined {
    149: return TokenType.class;
    152: return TokenType.enum;
    155: return TokenType.type;
    159: return TokenType.interface;
    [all …]
|
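classifier2020.ts uses a const enum TokenType as the vocabulary for semantic classification and maps each symbol to a TokenType (or undefined) in classifySymbol. A stripped-down sketch of that shape; SymbolLike and its fields are stand-ins, not the compiler's real Symbol / SymbolFlags API:

    const enum TokenType { class, enum, interface, namespace, type, parameter, variable, property, function, member }

    // Stand-in for the compiler's Symbol + SymbolFlags machinery.
    interface SymbolLike { isClass?: boolean; isEnum?: boolean; isInterface?: boolean; isTypeAlias?: boolean; }

    function classifySymbol(symbol: SymbolLike): TokenType | undefined {
      if (symbol.isClass) return TokenType.class;
      if (symbol.isEnum) return TokenType.enum;
      if (symbol.isTypeAlias) return TokenType.type;
      if (symbol.isInterface) return TokenType.interface;
      return undefined;   // nothing this classifier names
    }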
/third_party/typescript/tests/baselines/reference/ |
D | assignmentCompatForEnums.types |
    2: enum TokenType { One, Two };
    3: >TokenType : TokenType
    4: >One : TokenType.One
    5: >Two : TokenType.Two
    12: function returnType(): TokenType { return null; }
    13: >returnType : () => TokenType
    20: >x : TokenType
    21: >returnType() : TokenType
    22: >returnType : () => TokenType
    24: var x: TokenType = list['one'];
    [all …]
|
D | assignmentCompatForEnums.js |
    2: enum TokenType { One, Two };
    7: function returnType(): TokenType { return null; }
    12: var x: TokenType = list['one'];
    18: var TokenType;
    19: (function (TokenType) {  argument
    20: TokenType[TokenType["One"] = 0] = "One";
    21: TokenType[TokenType["Two"] = 1] = "Two";
    22: })(TokenType || (TokenType = {}));
|
D | assignmentCompatForEnums.symbols |
    2: enum TokenType { One, Two };
    3: >TokenType : Symbol(TokenType, Decl(assignmentCompatForEnums.ts, 0, 0))
    4: >One : Symbol(TokenType.One, Decl(assignmentCompatForEnums.ts, 0, 16))
    5: >Two : Symbol(TokenType.Two, Decl(assignmentCompatForEnums.ts, 0, 21))
    11: function returnType(): TokenType { return null; }
    13: >TokenType : Symbol(TokenType, Decl(assignmentCompatForEnums.ts, 0, 0))
    22: var x: TokenType = list['one'];
    24: >TokenType : Symbol(TokenType, Decl(assignmentCompatForEnums.ts, 0, 0))
|
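The .js baseline above is the standard down-level emit for a numeric enum: the IIFE installs both a forward mapping (name to value) and a reverse mapping (value to name) on the same object. A small usage sketch of what that gives at runtime:

    enum TokenType { One, Two }

    const value: number = TokenType.One;              // forward mapping: 0
    const label: string = TokenType[TokenType.One];   // reverse mapping: "One"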
/third_party/spirv-tools/tools/sva/src/ |
D | parser.js |
    15: import { TokenType } from "./token.js";
    57: if (token === TokenType.kError) {
    61: if (token.type === TokenType.kEOF)
    65: if (token.type === TokenType.kResultId) {
    69: if (token.type !== TokenType.kEqual) {
    77: if (token.type !== TokenType.kOp) {
    149: if (n0.type === TokenType.kOp || n0.type === TokenType.kEOF) {
    154: if (n1.type === TokenType.kEOF) {
    157: if (n0.type === TokenType.kResultId && n1.type === TokenType.kEqual)
    181: if (t.type !== TokenType.kResultId) {
    [all …]
|
D | lexer_test.js |
    17: import { TokenType } from "./token";
    26: assert.equal(t.type, TokenType.kOp);
    31: assert.equal(t.type, TokenType.kEOF);
    43: assert.equal(t.type, TokenType.kOp);
    48: assert.equal(t.type, TokenType.kResultId);
    66: assert.equal(t.type, TokenType.kFloatLiteral,
    72: assert.equal(t.type, TokenType.kEOF);
    83: assert.notEqual(t.type, TokenType.kFloatLiteral,
    98: assert.equal(t.type, TokenType.kIntegerLiteral,
    104: assert.equal(t.type, TokenType.kEOF);
    [all …]
|
D | lexer.js |
    15: import { Token, TokenType } from "./token.js";
    42: return new Token(TokenType.kEOF, this.cur_line_);
    76: return new Token(TokenType.kError, this.cur_line_, "Failed to match token");
    169: return new Token(TokenType.kFloatLiteral, this.cur_line_, parseFloat(substr));
    196: return new Token(TokenType.kIntegerLiteral, this.cur_line_, val);
    222: return new Token(TokenType.kIntegerLiteral, this.cur_line_, val);
    249: return new Token(TokenType.kResultId, this.cur_line_, {
    271: return new Token(TokenType.kIdentifier, this.cur_line_, ident);
    294: return new Token(TokenType.kOp, this.cur_line_, {
    307: type = TokenType.kEqual;
    [all …]
|
D | token.js |
    15: const TokenType = {  variable
    55: export {Token, TokenType};
|
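In this SPIR-V assembler, token.js defines TokenType as a plain object of constants, lexer.js wraps each lexeme in a Token carrying its type and source line, and parser.js drives decisions off token.type (for example, a result id followed by an equal sign starts an assignment). A rough TypeScript sketch of that shape; the constant values and any Token field beyond type/line are assumptions, not the real module:

    // Plain-object "enum", as token.js declares (actual member values unknown here).
    const TokenType = {
      kError: "error",
      kEOF: "eof",
      kOp: "op",
      kResultId: "result_id",
      kEqual: "equal",
      kIdentifier: "identifier",
      kIntegerLiteral: "integer_literal",
      kFloatLiteral: "float_literal",
    } as const;

    class Token {
      constructor(readonly type: string, readonly line: number, readonly data?: unknown) {}
    }

    // parser.js-style check: "%result = Op..." begins with kResultId then kEqual.
    function startsAssignment(n0: Token, n1: Token): boolean {
      return n0.type === TokenType.kResultId && n1.type === TokenType.kEqual;
    }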
/third_party/skia/third_party/externals/swiftshader/third_party/SPIRV-Tools/tools/sva/src/ |
D | parser.js |
    15: import { TokenType } from "./token.js";
    57: if (token === TokenType.kError) {
    61: if (token.type === TokenType.kEOF)
    65: if (token.type === TokenType.kResultId) {
    69: if (token.type !== TokenType.kEqual) {
    77: if (token.type !== TokenType.kOp) {
    149: if (n0.type === TokenType.kOp || n0.type === TokenType.kEOF) {
    154: if (n1.type === TokenType.kEOF) {
    157: if (n0.type === TokenType.kResultId && n1.type === TokenType.kEqual)
    181: if (t.type !== TokenType.kResultId) {
    [all …]
|
D | lexer_test.js |
    17: import { TokenType } from "./token";
    26: assert.equal(t.type, TokenType.kOp);
    31: assert.equal(t.type, TokenType.kEOF);
    43: assert.equal(t.type, TokenType.kOp);
    48: assert.equal(t.type, TokenType.kResultId);
    66: assert.equal(t.type, TokenType.kFloatLiteral,
    72: assert.equal(t.type, TokenType.kEOF);
    83: assert.notEqual(t.type, TokenType.kFloatLiteral,
    98: assert.equal(t.type, TokenType.kIntegerLiteral,
    104: assert.equal(t.type, TokenType.kEOF);
    [all …]
|
D | lexer.js |
    15: import { Token, TokenType } from "./token.js";
    42: return new Token(TokenType.kEOF, this.cur_line_);
    76: return new Token(TokenType.kError, this.cur_line_, "Failed to match token");
    169: return new Token(TokenType.kFloatLiteral, this.cur_line_, parseFloat(substr));
    196: return new Token(TokenType.kIntegerLiteral, this.cur_line_, val);
    222: return new Token(TokenType.kIntegerLiteral, this.cur_line_, val);
    249: return new Token(TokenType.kResultId, this.cur_line_, {
    271: return new Token(TokenType.kIdentifier, this.cur_line_, ident);
    294: return new Token(TokenType.kOp, this.cur_line_, {
    307: type = TokenType.kEqual;
    [all …]
|
D | token.js |
    15: const TokenType = {  variable
    55: export {Token, TokenType};
|
/third_party/skia/third_party/externals/spirv-tools/tools/sva/src/ |
D | parser.js |
    15: import { TokenType } from "./token.js";
    57: if (token === TokenType.kError) {
    61: if (token.type === TokenType.kEOF)
    65: if (token.type === TokenType.kResultId) {
    69: if (token.type !== TokenType.kEqual) {
    77: if (token.type !== TokenType.kOp) {
    149: if (n0.type === TokenType.kOp || n0.type === TokenType.kEOF) {
    154: if (n1.type === TokenType.kEOF) {
    157: if (n0.type === TokenType.kResultId && n1.type === TokenType.kEqual)
    181: if (t.type !== TokenType.kResultId) {
    [all …]
|
D | lexer_test.js |
    17: import { TokenType } from "./token";
    26: assert.equal(t.type, TokenType.kOp);
    31: assert.equal(t.type, TokenType.kEOF);
    43: assert.equal(t.type, TokenType.kOp);
    48: assert.equal(t.type, TokenType.kResultId);
    66: assert.equal(t.type, TokenType.kFloatLiteral,
    72: assert.equal(t.type, TokenType.kEOF);
    83: assert.notEqual(t.type, TokenType.kFloatLiteral,
    98: assert.equal(t.type, TokenType.kIntegerLiteral,
    104: assert.equal(t.type, TokenType.kEOF);
    [all …]
|
D | lexer.js |
    15: import { Token, TokenType } from "./token.js";
    42: return new Token(TokenType.kEOF, this.cur_line_);
    76: return new Token(TokenType.kError, this.cur_line_, "Failed to match token");
    169: return new Token(TokenType.kFloatLiteral, this.cur_line_, parseFloat(substr));
    196: return new Token(TokenType.kIntegerLiteral, this.cur_line_, val);
    222: return new Token(TokenType.kIntegerLiteral, this.cur_line_, val);
    249: return new Token(TokenType.kResultId, this.cur_line_, {
    271: return new Token(TokenType.kIdentifier, this.cur_line_, ident);
    294: return new Token(TokenType.kOp, this.cur_line_, {
    307: type = TokenType.kEqual;
    [all …]
|
D | token.js |
    15: const TokenType = {  variable
    55: export {Token, TokenType};
|
/third_party/vk-gl-cts/external/amber/src/src/ |
D | tokenizer.h |
    26: enum class TokenType : uint8_t {  enum
    39: explicit Token(TokenType type);
    42: bool IsHex() const { return type_ == TokenType::kHex; }  in IsHex()
    43: bool IsInteger() const { return type_ == TokenType::kInteger; }  in IsInteger()
    44: bool IsDouble() const { return type_ == TokenType::kDouble; }  in IsDouble()
    45: bool IsIdentifier() const { return type_ == TokenType::kIdentifier; }  in IsIdentifier()
    46: bool IsString() const { return type_ == TokenType::kString; }  in IsString()
    47: bool IsEOS() const { return type_ == TokenType::kEOS; }  in IsEOS()
    48: bool IsEOL() const { return type_ == TokenType::kEOL; }  in IsEOL()
    51: return type_ == TokenType::kIdentifier && string_value_ == ",";  in IsComma()
    [all …]
|
D | tokenizer.cc |
    26: Token::Token(TokenType type) : type_(type) {}  in Token()
    51: type_ = TokenType::kDouble;  in ConvertToDouble()
    62: return MakeUnique<Token>(TokenType::kEOS);  in NextToken()
    69: return MakeUnique<Token>(TokenType::kEOS);  in NextToken()
    74: return MakeUnique<Token>(TokenType::kEOL);  in NextToken()
    93: auto tok = MakeUnique<Token>(TokenType::kString);  in NextToken()
    152: auto tok = MakeUnique<Token>(TokenType::kString);  in NextToken()
    161: auto tok = MakeUnique<Token>(TokenType::kIdentifier);  in NextToken()
    208: auto tok = MakeUnique<Token>(TokenType::kIdentifier);  in NextToken()
    215: auto tok = MakeUnique<Token>(TokenType::kHex);  in NextToken()
    [all …]
|
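Amber's tokenizer.h takes a predicate-style approach: a C++ enum class TokenType : uint8_t plus a Token exposing IsHex() / IsInteger() / ... helpers, with IsComma() defined as an identifier token whose text is ",". The same shape sketched in TypeScript for comparison (illustrative only; the real code is C++ and hands tokens back via MakeUnique<Token> in NextToken()):

    enum TokenType { Hex, Integer, Double, Identifier, String, EOS, EOL }

    class Token {
      constructor(private readonly type: TokenType, private readonly stringValue = "") {}

      isHex(): boolean { return this.type === TokenType.Hex; }
      isInteger(): boolean { return this.type === TokenType.Integer; }
      isDouble(): boolean { return this.type === TokenType.Double; }
      isIdentifier(): boolean { return this.type === TokenType.Identifier; }
      isEOS(): boolean { return this.type === TokenType.EOS; }
      // A comma is not its own kind: it is an identifier whose text is ",".
      isComma(): boolean { return this.type === TokenType.Identifier && this.stringValue === ","; }
    }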
/third_party/parse5/packages/parse5/lib/common/ |
D | token.ts |
    3: export enum TokenType {  enum
    48: readonly type: TokenType;
    53: readonly type: TokenType.DOCTYPE;
    72: readonly type: TokenType.START_TAG | TokenType.END_TAG;
    93: readonly type: TokenType.COMMENT;
    98: readonly type: TokenType.EOF;
    102: type: TokenType.CHARACTER | TokenType.NULL_CHARACTER | TokenType.WHITESPACE_CHARACTER;
|
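parse5's token.ts is the discriminated-union variant: a TokenType enum plus token interfaces whose readonly type field is pinned to specific members, so checking token.type narrows the token. A self-contained sketch of that pattern (only the type fields come from the listing; the other fields are hypothetical):

    enum TokenType { CHARACTER, NULL_CHARACTER, WHITESPACE_CHARACTER, COMMENT, DOCTYPE, START_TAG, END_TAG, EOF }

    interface DoctypeToken { readonly type: TokenType.DOCTYPE; name: string | null; }
    interface TagToken { readonly type: TokenType.START_TAG | TokenType.END_TAG; tagName: string; }
    interface EOFToken { readonly type: TokenType.EOF; }

    type Token = DoctypeToken | TagToken | EOFToken;

    function describe(token: Token): string {
      if (token.type === TokenType.DOCTYPE) return `doctype ${token.name}`;  // narrowed to DoctypeToken
      if (token.type === TokenType.EOF) return "eof";                        // narrowed to EOFToken
      return `tag <${token.tagName}>`;                                       // what remains is TagToken
    }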
/third_party/protobuf/src/google/protobuf/util/internal/ |
D | json_stream_parser.h |
    96: enum TokenType {  enum
    142: util::Status ParseValue(TokenType type);
    174: util::Status ParseEntry(TokenType type);
    177: util::Status ParseEntryMid(TokenType type);
    180: util::Status ParseObjectMid(TokenType type);
    186: util::Status ParseArrayValue(TokenType type);
    189: util::Status ParseArrayMid(TokenType type);
    198: bool IsEmptyNullAllowed(TokenType type);
    223: TokenType GetNextTokenType();
|
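json_stream_parser.h declares a family of status-returning Parse* methods that each take the current TokenType, plus a GetNextTokenType() lookahead. The header excerpt does not show the enum's members or the parsing logic, so the sketch below only illustrates that dispatch shape, with invented member names and a placeholder status type:

    enum TokenType { StartObject, EndObject, StartArray, EndArray, Scalar, Unknown }
    type Status = { ok: true } | { ok: false; message: string };

    class StreamParser {
      // Lookahead over buffered input; stubbed here.
      protected getNextTokenType(): TokenType { return TokenType.Unknown; }

      // Each step inspects the current token kind and either recurses or fails.
      protected parseValue(type: TokenType): Status {
        switch (type) {
          case TokenType.StartObject: return this.parseEntry(this.getNextTokenType());
          case TokenType.StartArray: return this.parseArrayValue(this.getNextTokenType());
          case TokenType.Scalar: return { ok: true };
          default: return { ok: false, message: "unexpected token" };
        }
      }
      protected parseEntry(_type: TokenType): Status { return { ok: true }; }
      protected parseArrayValue(_type: TokenType): Status { return { ok: true }; }
    }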
/third_party/typescript/tests/cases/compiler/ |
D | assignmentCompatForEnums.ts |
    1: enum TokenType { One, Two };  enum
    6: function returnType(): TokenType { return null; }
    11: var x: TokenType = list['one'];
|
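This test source (the origin of the baselines listed earlier) exercises assignment compatibility for numeric enums. For context, the relevant rule sketched with the same enum; the list variable is elided in the listing, so it is left out here:

    enum TokenType { One, Two }

    const n: number = TokenType.One;   // an enum member is assignable to number

    declare const fromNumber: number;  // hypothetical number-typed value
    const t: TokenType = fromNumber;   // and a number value is assignable to a numeric enum type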