1// Copyright 2021 The Tint Authors. 2// 3// Licensed under the Apache License, Version 2.0 (the "License"); 4// you may not use this file except in compliance with the License. 5// You may obtain a copy of the License at 6// 7// http://www.apache.org/licenses/LICENSE-2.0 8// 9// Unless required by applicable law or agreed to in writing, software 10// distributed under the License is distributed on an "AS IS" BASIS, 11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12// See the License for the specific language governing permissions and 13// limitations under the License. 14 15// This tool parses WGSL specification and outputs WGSL rules. 16// 17// To run from root of tint repo: 18// go get golang.org/x/net/html # Only required once 19// Then run 20// ./tools/get-test-plan --spec=<path-to-spec-file-or-url> --output=<path-to-output-file> 21// Or run 22// cd tools/src && go run cmd/get-spec-rules/main.go --output=<path-to-output-file> 23// 24// To see help 25// ./tools/get-test-plan --help 26 27package main 28 29import ( 30 "crypto/sha1" 31 "encoding/json" 32 "errors" 33 "flag" 34 "fmt" 35 "io" 36 "io/ioutil" 37 "net/http" 38 "net/url" 39 "os" 40 "path/filepath" 41 "regexp" 42 "strconv" 43 "strings" 44 45 "golang.org/x/net/html" 46) 47 48const ( 49 toolName = "get-test-plan" 50 specPath = "https://www.w3.org/TR/WGSL/" 51 specVersionUsed = "https://www.w3.org/TR/2021/WD-WGSL-20210929/" 52) 53 54var ( 55 errInvalidArg = errors.New("invalid arguments") 56 headURL = specVersionUsed 57 markedNodesSet = make(map[*html.Node]bool) 58 testNamesSet = make(map[string]bool) 59 sha1sSet = make(map[string]bool) 60 keywords = []string{ 61 "MUST ", "MUST NOT ", "REQUIRED ", "SHALL ", 62 "SHALL NOT ", "SHOULD ", "SHOULD NOT ", 63 "RECOMMENDED ", "MAY ", "OPTIONAL ", 64 } 65 globalSection = "" 66 globalPrevSectionX = -1 67 globalRuleCounter = 0 68) 69 70// Holds all the information about a wgsl rule 71type rule struct { 72 Number int // The index of this obj in an array of 'rules' 73 Section int // The section this rule belongs to 74 SubSection string // The section this rule belongs to 75 URL string // The section's URL of this rule 76 Description string // The rule's description 77 TestName string // The suggested test name to use when writing CTS 78 Keyword string // The keyword eg. MUST, ALGORITHM, ..., ie. Indicating why the rule is added 79 Desc []string 80 Sha string 81} 82 83func main() { 84 flag.Usage = func() { 85 out := flag.CommandLine.Output() 86 fmt.Fprintf(out, "%v parses WGSL spec and outputs a test plan\n", toolName) 87 fmt.Fprintf(out, "\n") 88 fmt.Fprintf(out, "Usage:\n") 89 fmt.Fprintf(out, " %s [spec] [flags]\n", toolName) 90 fmt.Fprintf(out, "\n") 91 fmt.Fprintf(out, "spec is an optional local file or a URL to the WGSL specification.\n") 92 fmt.Fprintf(out, "If spec is omitted then the specification is fetched from %v\n\n", specPath) 93 94 fmt.Fprintf(out, "this tools is developed based on: %v\n", specVersionUsed) 95 fmt.Fprintf(out, "flags may be any combination of:\n") 96 flag.PrintDefaults() 97 } 98 99 err := run() 100 switch err { 101 case nil: 102 return 103 case errInvalidArg: 104 fmt.Fprintf(os.Stderr, "Error: %v\n\n", err) 105 flag.Usage() 106 default: 107 fmt.Fprintf(os.Stderr, "%v\n", err) 108 } 109 os.Exit(1) 110} 111 112func run() error { 113 // Parse flags 114 keyword := flag.String("keyword", "", 115 `if provided, it will be used as the keyword to search WGSL spec for rules 116if omitted, the keywords indicated in RFC 2119 requirement are used, 117in addition to nodes containing a nowrap or an algorithm tag eg. <tr algorithm=...>`) 118 119 ctsDir := flag.String("cts-directory", "", 120 `if provided: 121 validation cts test plan will be written to: '<cts-directory>/validation/' 122 builtin functions cts test plan will be written to: '<cts-directory>/execution/builtin'`) 123 124 output := flag.String("output", "", 125 `if file extension is 'txt' the output format will be a human readable text 126if file extension is 'tsv' the output format will be a tab separated file 127if file extension is 'json' the output format will be json 128if omitted, a human readable version of the rules is written to stdout`) 129 130 flag.Parse() 131 132 args := flag.Args() 133 134 // Parse spec 135 spec, err := parseSpec(args) 136 if err != nil { 137 return err 138 } 139 140 // Set keywords 141 if *keyword != "" { 142 keywords = []string{*keyword} 143 } 144 145 parser, err := Parse(spec) 146 if err != nil { 147 return err 148 } 149 rules := parser.rules 150 151 if *ctsDir != "" { 152 getUnimplementedTestPlan(*parser, *ctsDir) 153 } 154 155 txt, tsv := concatRules(rules) 156 // if no output then write rules to stdout 157 if *output == "" { 158 fmt.Println(txt) 159 // write concatenated rules to file 160 } else if strings.HasSuffix(*output, ".json") { 161 j, err := json.Marshal(rules) 162 if err != nil { 163 return err 164 } 165 return writeFile(*output, string(j)) 166 } else if strings.HasSuffix(*output, ".txt") { 167 return writeFile(*output, txt) 168 } else if strings.HasSuffix(*output, ".tsv") { 169 return writeFile(*output, tsv) 170 } else { 171 return fmt.Errorf("unsupported output file extension: %v", *output) 172 } 173 return nil 174} 175 176// getSectionRange scans all the rules and returns the rule index interval of a given section. 177// The sections range is the interval: rules[start:end]. 178// example: section = [x, y, z] ie. x.y.z(.w)* it returns (start = min(w),end = max(w)) 179// if there are no rules extracted from x.y.z it returns (-1, -1) 180func getSectionRange(rules []rule, s []int) (start, end int, err error) { 181 start = -1 182 end = -1 183 for _, r := range rules { 184 sectionDims, err := parseSection(r.SubSection) 185 if err != nil { 186 return -1, -1, err 187 } 188 189 ruleIsInSection := true 190 for i := range s { 191 if sectionDims[i] != s[i] { 192 ruleIsInSection = false 193 break 194 } 195 } 196 if !ruleIsInSection { 197 continue 198 } 199 200 dim := -1 201 if len(sectionDims) == len(s) { 202 //x.y is the same as x.y.0 203 dim = 0 204 } else if len(sectionDims) > len(s) { 205 dim = sectionDims[len(s)] 206 } else { 207 continue 208 } 209 210 if start == -1 { 211 start = dim 212 } 213 if dim > end { 214 end = dim 215 } 216 } 217 218 if start == -1 || end == -1 { 219 return -1, -1, fmt.Errorf("cannot determine section range") 220 } 221 222 return start, end, nil 223} 224 225// parseSection return the numbers for any dot-seprated string of numbers 226// example: x.y.z.w returns [x, y, z, w] 227// returns an error if the string does not match "^\d(.\d)*$" 228func parseSection(in string) ([]int, error) { 229 parts := strings.Split(in, ".") 230 out := make([]int, len(parts)) 231 for i, part := range parts { 232 var err error 233 out[i], err = strconv.Atoi(part) 234 if err != nil { 235 return nil, fmt.Errorf(`cannot parse sections string "%v": %w`, in, err) 236 } 237 } 238 return out, nil 239} 240 241// concatRules concatnate rule slice to and makes two string output 242// txt is a human readable string 243// tsv is a tab saparated string 244func concatRules(rules []rule) (string, string) { 245 txtLines := []string{} 246 tsvLines := []string{"Number\tUniqueId\tSection\tURL\tDescription\tProposed Test Name\tkeyword"} 247 for _, r := range rules { 248 txtLines = append(txtLines, strings.Join([]string{ 249 "Rule Number " + strconv.Itoa(r.Number) + ":", 250 "Unique Id: " + r.Sha, 251 "Section: " + r.SubSection, 252 "Keyword: " + r.Keyword, 253 "testName: " + r.TestName, 254 "URL: " + r.URL, 255 r.Description, 256 "---------------------------------------------------"}, "\n")) 257 258 tsvLines = append(tsvLines, strings.Join([]string{ 259 strconv.Itoa(r.Number), 260 r.Sha, 261 r.SubSection, 262 r.URL, 263 strings.Trim(r.Description, "\n\t "), 264 r.Keyword, 265 r.TestName}, "\t")) 266 } 267 txt := strings.Join(txtLines, "\n") 268 tsv := strings.Join(tsvLines, "\n") 269 return txt, tsv 270} 271 272// writeFile writes content to path 273// the existing content will be overwritten 274func writeFile(path, content string) error { 275 if err := os.MkdirAll(filepath.Dir(path), 0777); err != nil { 276 return fmt.Errorf("failed to create directory for '%v': %w", path, err) 277 } 278 if err := ioutil.WriteFile(path, []byte(content), 0666); err != nil { 279 return fmt.Errorf("failed to write file '%v': %w", path, err) 280 } 281 return nil 282} 283 284// parseSpec reads the spec from a local file, or the URL to WGSL spec 285func parseSpec(args []string) (*html.Node, error) { 286 // Check for explicit WGSL spec path 287 specURL, _ := url.Parse(specPath) 288 switch len(args) { 289 case 0: 290 case 1: 291 var err error 292 specURL, err = url.Parse(args[0]) 293 if err != nil { 294 return nil, err 295 } 296 default: 297 if len(args) > 1 { 298 return nil, errInvalidArg 299 } 300 } 301 302 // The specURL might just be a local file path, in which case automatically 303 // add the 'file' URL scheme 304 if specURL.Scheme == "" { 305 specURL.Scheme = "file" 306 } 307 308 // Open the spec from HTTP(S) or from a local file 309 var specContent io.ReadCloser 310 switch specURL.Scheme { 311 case "http", "https": 312 response, err := http.Get(specURL.String()) 313 if err != nil { 314 return nil, fmt.Errorf("failed to load the WGSL spec from '%v': %w", specURL, err) 315 } 316 specContent = response.Body 317 case "file": 318 path, err := filepath.Abs(specURL.Path) 319 if err != nil { 320 return nil, fmt.Errorf("failed to load the WGSL spec from '%v': %w", specURL, err) 321 } 322 323 file, err := os.Open(path) 324 if err != nil { 325 return nil, fmt.Errorf("failed to load the WGSL spec from '%v': %w", specURL, err) 326 } 327 specContent = file 328 default: 329 return nil, fmt.Errorf("unsupported URL scheme: %v", specURL.Scheme) 330 } 331 defer specContent.Close() 332 333 // Open the spec from HTTP(S) or from a local file 334 switch specURL.Scheme { 335 case "http", "https": 336 response, err := http.Get(specURL.String()) 337 if err != nil { 338 return nil, fmt.Errorf("failed to load the WGSL spec from '%v': %w", specURL, err) 339 } 340 specContent = response.Body 341 342 case "file": 343 path, err := filepath.Abs(specURL.Path) 344 if err != nil { 345 return nil, fmt.Errorf("failed to load the WGSL spec from '%v': %w", specURL, err) 346 } 347 file, err := os.Open(path) 348 if err != nil { 349 return nil, fmt.Errorf("failed to load the WGSL spec from '%v': %w", specURL, err) 350 } 351 specContent = file 352 353 default: 354 return nil, fmt.Errorf("unsupported URL scheme: %v", specURL.Scheme) 355 } 356 defer specContent.Close() 357 358 // Parse spec 359 spec, err := html.Parse(specContent) 360 if err != nil { 361 return spec, err 362 } 363 return spec, nil 364} 365 366// containsKeyword returns (true, 'kw'), if input string 'data' contains an 367// element of the string list, otherwise it returns (false, "") 368// search is not case sensitive 369func containsKeyword(data string, list []string) (bool, string) { 370 for _, kw := range list { 371 if strings.Contains( 372 strings.ToLower(data), 373 strings.ToLower(kw), 374 ) { 375 return true, kw 376 } 377 } 378 return false, "" 379} 380 381// parser holds the information extracted from the spec 382// TODO(sarahM0): https://bugs.c/tint/1149/ clean up the vars holding section information 383type Parser struct { 384 rules []rule // a slice to store the rules extracted from the spec 385 firstSectionContainingRule int // the first section a rules is extracted from 386 lastSectionContainingRule int // the last section a rules is extracted form 387} 388 389func Parse(node *html.Node) (*Parser, error) { 390 var p *Parser = new(Parser) 391 p.firstSectionContainingRule = -1 392 p.lastSectionContainingRule = -1 393 return p, p.getRules(node) 394} 395 396// getRules populates the rule slice by scanning HTML node and its children 397func (p *Parser) getRules(node *html.Node) error { 398 section, subSection, err := getSectionInfo(node) 399 400 if err != nil { 401 //skip this node and move on to its children 402 } else { 403 // Do not generate rules for introdoctory sections 404 if section > 2 { 405 // Check if this node is visited before. This is necessary since 406 // sometimes to create rule description we visit siblings or children 407 if marked := markedNodesSet[node]; marked { 408 return nil 409 } 410 411 // update parser's section info 412 if p.firstSectionContainingRule == -1 { 413 p.firstSectionContainingRule = section 414 } 415 p.lastSectionContainingRule = section 416 417 // extract rules from the node 418 if err := p.getAlgorithmRule(node, section, subSection); err != nil { 419 return err 420 } 421 if err := p.getNowrapRule(node, section, subSection); err != nil { 422 return err 423 } 424 if err := p.getKeywordRule(node, section, subSection); err != nil { 425 return err 426 } 427 } 428 } 429 430 for child := node.FirstChild; child != nil; child = child.NextSibling { 431 if err := p.getRules(child); err != nil { 432 return err 433 } 434 } 435 return nil 436} 437 438// gatherKeyworkRules scans the HTML node data, adds a new rules if it contains one 439// of the keywords 440func (p *Parser) getKeywordRule(node *html.Node, section int, subSection string) error { 441 if node.Type != html.TextNode { 442 return nil 443 } 444 445 hasKeyword, keyword := containsKeyword(node.Data, keywords) 446 if !hasKeyword { 447 return nil 448 } 449 450 // TODO(sarah): create a list of rule.sha1 for unwanted rules 451 if strings.HasPrefix(node.Data, "/*") || 452 strings.Contains(node.Data, "reference must load and store from the same") || 453 strings.Contains(node.Data, " to an invalid reference may either: ") || 454 // Do not add Issues 455 strings.Contains(node.Data, "Issue: ") || 456 strings.Contains(node.Data, "WebGPU issue") || 457 strings.Contains(node.Data, "/issues/") { 458 return nil 459 } 460 461 id := getID(node) 462 desc := cleanUpString(getNodeData(node)) 463 464 t, _, err := testName(id, desc, subSection) 465 if err != nil { 466 return err 467 } 468 469 sha, err := getSha1(desc, id) 470 if err != nil { 471 return err 472 } 473 474 r := rule{ 475 Sha: sha, 476 Number: len(p.rules) + 1, 477 Section: section, 478 SubSection: subSection, 479 URL: headURL + "#" + id, 480 Description: desc, 481 TestName: t, 482 Keyword: keyword, 483 } 484 p.rules = append(p.rules, r) 485 486 return nil 487} 488 489// getNodeData builds the rule's description from the HTML node's data and all of its siblings. 490// the node data is a usually a partial sentence, build the description from the node's data and 491// all it's siblings to get a full context of the rule. 492func getNodeData(node *html.Node) string { 493 sb := strings.Builder{} 494 if node.Parent != nil { 495 for n := node.Parent.FirstChild; n != nil; n = n.NextSibling { 496 printNodeText(n, &sb) 497 } 498 } else { 499 printNodeText(node, &sb) 500 } 501 return sb.String() 502} 503 504// getAlgorithmRules scans the HTML node for blocks that 505// contain an 'algorithm' class, populating the rule slice. 506// ie. <tr algorithm=...> and <p algorithm=...> 507func (p *Parser) getAlgorithmRule(node *html.Node, section int, subSection string) error { 508 if !hasClass(node, "algorithm") { 509 return nil 510 } 511 // mark this node as seen 512 markedNodesSet[node] = true 513 514 sb := strings.Builder{} 515 printNodeText(node, &sb) 516 title := cleanUpStartEnd(getNodeAttrValue(node, "data-algorithm")) 517 desc := title + ":\n" + cleanUpString(sb.String()) 518 id := getID(node) 519 testName, _, err := testName(id, desc, subSection) 520 if err != nil { 521 return err 522 } 523 524 sha, err := getSha1(desc, id) 525 if err != nil { 526 return err 527 } 528 529 r := rule{ 530 Sha: sha, 531 Number: len(p.rules) + 1, 532 Section: section, 533 SubSection: subSection, 534 URL: headURL + "#" + id, 535 Description: desc, 536 TestName: testName, 537 Keyword: "ALGORITHM", 538 } 539 p.rules = append(p.rules, r) 540 return nil 541} 542 543// getNowrapRules scans the HTML node for blocks that contain a 544// 'nowrap' class , populating the rule slice. 545// ie. <td class="nowrap"> 546// TODO(https://crbug.com/tint/1157) 547// remove this when https://github.com/gpuweb/gpuweb/pull/2084 is closed 548// and make sure Derivative built-in functions are added to the rules 549func (p *Parser) getNowrapRule(node *html.Node, section int, subSection string) error { 550 if !hasClass(node, "nowrap") { 551 return nil 552 } 553 // mark this node as seen 554 markedNodesSet[node] = true 555 desc := cleanUpStartEnd(getNodeData(node)) 556 id := getID(node) 557 558 t, _, err := testName(id, desc, subSection) 559 if err != nil { 560 return err 561 } 562 563 sha, err := getSha1(desc, id) 564 if err != nil { 565 return err 566 } 567 568 r := rule{ 569 Sha: sha, 570 Number: len(p.rules) + 1, 571 SubSection: subSection, 572 Section: section, 573 URL: headURL + "#" + id, 574 Description: desc, 575 TestName: t, 576 Keyword: "Nowrap", 577 } 578 p.rules = append(p.rules, r) 579 580 return nil 581} 582 583// hasClass returns true if node is has the given "class" attribute. 584func hasClass(node *html.Node, class string) bool { 585 for _, attr := range node.Attr { 586 if attr.Key == "class" { 587 classes := strings.Split(attr.Val, " ") 588 for _, c := range classes { 589 if c == class { 590 return true 591 } 592 } 593 } 594 } 595 return false 596} 597 598// getSectionInfo returns the section this node belongs to 599func getSectionInfo(node *html.Node) (int, string, error) { 600 sub := getNodeAttrValue(node, "data-level") 601 for p := node; sub == "" && p != nil; p = p.Parent { 602 sub = getSiblingSectionInfo(p) 603 } 604 // when there is and ISSUE in HTML section cannot be set 605 // use the previously set section 606 if sub == "" && globalSection == "" { 607 // for the section Abstract no section can be found 608 // return -1 to skip this node 609 return -1, "", fmt.Errorf("cannot get section info") 610 } 611 if sub == "" { 612 sub = globalSection 613 } 614 globalSection = sub 615 sectionDims, err := parseSection(sub) 616 if len(sectionDims) > -1 { 617 return sectionDims[0], sub, err 618 } 619 return -1, sub, err 620} 621 622// getSection return the section of this node's sibling 623// iterates over all siblings and return the first one it can determine 624func getSiblingSectionInfo(node *html.Node) string { 625 for sp := node.PrevSibling; sp != nil; sp = sp.PrevSibling { 626 section := getNodeAttrValue(sp, "data-level") 627 if section != "" { 628 return section 629 } 630 } 631 return "" 632} 633 634// GetSiblingSectionInfo determines if the node's id refers to an example 635func isExampleNode(node *html.Node) string { 636 for sp := node.PrevSibling; sp != nil; sp = sp.PrevSibling { 637 id := getNodeAttrValue(sp, "id") 638 if id != "" && !strings.Contains(id, "example-") { 639 return id 640 } 641 } 642 return "" 643} 644 645// getID returns the id of the section this node belongs to 646func getID(node *html.Node) string { 647 id := getNodeAttrValue(node, "id") 648 for p := node; id == "" && p != nil; p = p.Parent { 649 id = isExampleNode(p) 650 } 651 return id 652} 653 654var ( 655 reCleanUpString = regexp.MustCompile(`\n(\n|\s|\t)+|(\s|\t)+\n`) 656 reSpacePlusTwo = regexp.MustCompile(`\t|\s{2,}`) 657 reBeginOrEndWithSpace = regexp.MustCompile(`^\s|\s$`) 658 reIrregularWhiteSpace = regexp.MustCompile(`§.`) 659) 660 661// cleanUpString creates a string by removing all extra spaces, newlines and tabs 662// form input string 'in' and returns it 663// This is done so that the uniqueID does not change because of a change in white spaces 664// 665// example in: 666// ` float abs: 667// T is f32 or vecN<f32> 668// abs(e: T ) -> T 669// Returns the absolute value of e (e.g. e with a positive sign bit). Component-wise when T is a vector. 670// (GLSLstd450Fabs)` 671// 672// example out: 673// `float abs: 674// T is f32 or vecN<f32> abs(e: T ) -> T Returns the absolute value of e (e.g. e with a positive sign bit). Component-wise when T is a vector. (GLSLstd450Fabs)` 675func cleanUpString(in string) string { 676 out := reCleanUpString.ReplaceAllString(in, " ") 677 out = reSpacePlusTwo.ReplaceAllString(out, " ") 678 //`§.` is not a valid character for a cts description 679 // ie. this is invalid: g.test().desc(`§.`) 680 out = reIrregularWhiteSpace.ReplaceAllString(out, "section ") 681 out = reBeginOrEndWithSpace.ReplaceAllString(out, "") 682 return out 683} 684 685var ( 686 reCleanUpStartEnd = regexp.MustCompile(`^\s+|\s+$|^\t+|\t+$|^\n+|\n+$`) 687) 688 689// cleanUpStartEnd creates a string by removing all extra spaces, 690// newlines and tabs form the start and end of the input string. 691// Example: 692// input: "\s\t\nHello\s\n\t\Bye\s\s\s\t\n\n\n" 693// output: "Hello\s\n\tBye" 694// input2: "\nbye\n\n" 695// output2: "\nbye" 696func cleanUpStartEnd(in string) string { 697 out := reCleanUpStartEnd.ReplaceAllString(in, "") 698 return out 699} 700 701var ( 702 name = "^[a-zA-Z0-9_]+$" 703 reName = regexp.MustCompile(`[^a-zA-Z0-9_]`) 704 reUnderScore = regexp.MustCompile(`[_]+`) 705 reDoNotBegin = regexp.MustCompile(`^[0-9_]+|[_]$`) 706) 707 708// testName creates a test name given a rule id (ie. section name), description and section 709// returns for a builtin rule: 710// testName:${section name} + "," + ${builtin name} 711// builtinName: ${builtin name} 712// err: nil 713// returns for a other rules: 714// testName: ${section name} + "_rule_ + " + ${string(counter)} 715// builtinName: "" 716// err: nil 717// if it cannot create a unique name it returns "", "", err. 718func testName(id string, desc string, section string) (testName, builtinName string, err error) { 719 // regex for every thing other than letters and numbers 720 if desc == "" || section == "" || id == "" { 721 return "", "", fmt.Errorf("cannot generate test name") 722 } 723 // avoid any characters other than letters, numbers and underscore 724 id = reName.ReplaceAllString(id, "_") 725 // avoid underscore repeats 726 id = reUnderScore.ReplaceAllString(id, "_") 727 // test name must not start with underscore or a number 728 // nor end with and underscore 729 id = reDoNotBegin.ReplaceAllString(id, "") 730 731 sectionX, err := parseSection(section) 732 if err != nil { 733 return "", "", err 734 } 735 736 builtinName = "" 737 index := strings.Index(desc, ":") 738 if strings.Contains(id, "builtin_functions") && index > -1 { 739 builtinName = reName.ReplaceAllString(desc[:index], "_") 740 builtinName = reDoNotBegin.ReplaceAllString(builtinName, "") 741 builtinName = reUnderScore.ReplaceAllString(builtinName, "_") 742 match, _ := regexp.MatchString(name, builtinName) 743 if match { 744 testName = id + "," + builtinName 745 // in case there is more than one builtin functions 746 // with the same name in one section: 747 // "id,builtin", "id,builtin2", "id,builtin3", ... 748 for i := 2; testNamesSet[testName]; i++ { 749 testName = id + "," + builtinName + strconv.Itoa(i) 750 } 751 testNamesSet[testName] = true 752 return testName, builtinName, nil 753 } 754 755 } 756 757 if sectionX[0] == globalPrevSectionX { 758 globalRuleCounter++ 759 } else { 760 globalRuleCounter = 0 761 globalPrevSectionX = sectionX[0] 762 } 763 testName = id + ",rule" + strconv.Itoa(globalRuleCounter) 764 if testNamesSet[testName] { 765 testName = "error-unable-to-generate-unique-file-name" 766 return testName, "", fmt.Errorf("unable to generate unique test name\n" + desc) 767 } 768 testNamesSet[testName] = true 769 return testName, "", nil 770} 771 772// printNodeText traverses node and its children, writing the Data of all TextNodes to sb. 773func printNodeText(node *html.Node, sb *strings.Builder) { 774 // mark this node as seen 775 markedNodesSet[node] = true 776 if node.Type == html.TextNode { 777 sb.WriteString(node.Data) 778 } 779 780 for child := node.FirstChild; child != nil; child = child.NextSibling { 781 printNodeText(child, sb) 782 } 783} 784 785// getNodeAttrValue scans attributes of 'node' and returns the value of attribute 'key' 786// or an empty string if 'node' doesn't have attribute 'key' 787func getNodeAttrValue(node *html.Node, key string) string { 788 for _, attr := range node.Attr { 789 if attr.Key == key { 790 return attr.Val 791 } 792 } 793 return "" 794} 795 796// getSha1 returns the first 8 byte of sha1(a+b) 797func getSha1(a string, b string) (string, error) { 798 sum := sha1.Sum([]byte(a + b)) 799 sha := fmt.Sprintf("%x", sum[0:8]) 800 if sha1sSet[sha] { 801 return "", fmt.Errorf("sha1 is not unique") 802 } 803 sha1sSet[sha] = true 804 return sha, nil 805} 806 807// getUnimplementedPlan generate the typescript code of a test plan for rules in sections[start, end] 808// then it writes the generated test plans in the given 'path' 809func getUnimplementedTestPlan(p Parser, path string) error { 810 rules := p.rules 811 start := p.firstSectionContainingRule 812 end := p.lastSectionContainingRule 813 validationPath := filepath.Join(path, "validation") 814 if err := validationTestPlan(rules, validationPath, start, end); err != nil { 815 return err 816 } 817 818 executionPath := filepath.Join(path, "execution", "builtin") 819 if err := executionTestPlan(rules, executionPath); err != nil { 820 return err 821 } 822 return nil 823} 824 825// getTestPlanFilePath returns a sort friendly path 826// example: if we have 10 sections, and generate filenames naively, this will be the sorted result: 827// section1.spec.ts -> section10.spec.ts -> section2.spec.ts -> ... 828// if we make all the section numbers have the same number of digits, we will get: 829// section01.spec.ts -> section02.spec.ts -> ... -> section10.spec.ts 830func getTestPlanFilePath(path string, x, y, digits int) (string, error) { 831 fileName := "" 832 if y != -1 { 833 // section16.01.spec.ts, ... 834 sectionFmt := fmt.Sprintf("section%%d_%%.%dd.spec.ts", digits) 835 fileName = fmt.Sprintf(sectionFmt, x, y) 836 } else { 837 // section01.spec.ts, ... 838 sectionFmt := fmt.Sprintf("section%%.%dd.spec.ts", digits) 839 fileName = fmt.Sprintf(sectionFmt, x) 840 } 841 return filepath.Join(path, fileName), nil 842 843} 844 845// validationTestPlan generates the typescript code of a test plan for rules in sections[start, end] 846func validationTestPlan(rules []rule, path string, start int, end int) error { 847 content := [][]string{} 848 filePath := []string{} 849 for section := 0; section <= end; section++ { 850 sb := strings.Builder{} 851 sectionStr := strconv.Itoa(section) 852 testDescription := "`WGSL Section " + sectionStr + " Test Plan`" 853 sb.WriteString(fmt.Sprintf(validationTestHeader, testDescription)) 854 content = append(content, []string{sb.String()}) 855 f, err := getTestPlanFilePath(path, section, -1, len(strconv.Itoa(end))) 856 if err != nil { 857 return nil 858 } 859 filePath = append(filePath, f) 860 } 861 862 for _, r := range rules { 863 sectionDims, err := parseSection(r.SubSection) 864 if err != nil || len(sectionDims) == 0 { 865 return err 866 } 867 section := sectionDims[0] 868 if section < start || section >= end { 869 continue 870 } 871 content[section] = append(content[section], testPlan(r)) 872 } 873 874 for i := start; i <= end; i++ { 875 if len(content[i]) > 1 { 876 if err := writeFile(filePath[i], strings.Join(content[i], "\n")); err != nil { 877 return err 878 } 879 } 880 } 881 return nil 882 883} 884 885// executionTestPlan generates the typescript code of a test plan for rules in the given section 886// the rules in section X.Y.* will be written to path/sectionX_Y.spec.ts 887func executionTestPlan(rules []rule, path string) error { 888 // TODO(SarahM) This generates execution tests for builtin function tests. Add other executions tests. 889 section, err := getBuiltinSectionNum(rules) 890 if err != nil { 891 return err 892 } 893 894 content := [][]string{} 895 filePath := []string{} 896 897 start, end, err := getSectionRange(rules, []int{section}) 898 if err != nil || start == -1 || end == -1 { 899 return err 900 } 901 for y := 0; y <= end; y++ { 902 fileName, err := getTestPlanFilePath(path, section, y, len(strconv.Itoa(end))) 903 if err != nil { 904 return err 905 } 906 filePath = append(filePath, fileName) 907 908 sb := strings.Builder{} 909 testDescription := fmt.Sprintf("`WGSL section %v.%v execution test`", section, y) 910 sb.WriteString(fmt.Sprintf(executionTestHeader, testDescription)) 911 content = append(content, []string{sb.String()}) 912 } 913 914 for _, r := range rules { 915 if r.Section != section || !isBuiltinFunctionRule(r) { 916 continue 917 } 918 919 index := -1 920 sectionDims, err := parseSection(r.SubSection) 921 if err != nil || len(sectionDims) == 0 { 922 return err 923 } 924 925 if len(sectionDims) == 1 { 926 // section = x 927 index = 0 928 } else { 929 // section = x.y(.z)* 930 index = sectionDims[1] 931 } 932 933 if index < 0 && index >= len(content) { 934 return fmt.Errorf("cannot append to content, index %v out of range 0..%v", 935 index, len(content)-1) 936 } 937 content[index] = append(content[index], testPlan(r)) 938 } 939 940 for i := start; i <= end; i++ { 941 // Write the file if there is a test in there 942 // compared with >1 because content has at least the test description 943 if len(content[i]) > 1 { 944 if err := writeFile(filePath[i], strings.Join(content[i], "\n")); err != nil { 945 return err 946 } 947 } 948 } 949 return nil 950} 951 952func getBuiltinSectionNum(rules []rule) (int, error) { 953 for _, r := range rules { 954 if strings.Contains(r.URL, "builtin-functions") { 955 return r.Section, nil 956 } 957 } 958 return -1, fmt.Errorf("unable to find the built-in function section") 959} 960 961func isBuiltinFunctionRule(r rule) bool { 962 _, builtinName, _ := testName(r.URL, r.Description, r.SubSection) 963 return builtinName != "" || strings.Contains(r.URL, "builtin-functions") 964} 965 966func testPlan(r rule) string { 967 sb := strings.Builder{} 968 sb.WriteString(fmt.Sprintf(unImplementedTestTemplate, r.TestName, r.Sha, r.URL, 969 "`\n"+r.Description+"\n"+howToContribute+"\n`")) 970 971 return sb.String() 972} 973 974const ( 975 validationTestHeader = `export const description = %v; 976 977import { makeTestGroup } from '../../../common/framework/test_group.js'; 978 979import { ShaderValidationTest } from './shader_validation_test.js'; 980 981export const g = makeTestGroup(ShaderValidationTest); 982` 983 executionTestHeader = `export const description = %v; 984 985import { makeTestGroup } from '../../../../common/framework/test_group.js'; 986import { GPUTest } from '../../../gpu_test.js'; 987 988export const g = makeTestGroup(GPUTest); 989` 990 unImplementedTestTemplate = `g.test('%v') 991 .uniqueId('%v') 992 .specURL('%v') 993 .desc( 994 %v 995 ) 996 .params(u => u.combine('placeHolder1', ['placeHolder2', 'placeHolder3'])) 997 .unimplemented(); 998` 999 howToContribute = ` 1000Please read the following guidelines before contributing: 1001https://github.com/gpuweb/cts/blob/main/docs/plan_autogen.md` 1002) 1003