/* eslint-disable no-console */

import { readFileSync, createReadStream, readdirSync } from 'node:fs';
import Benchmark from 'benchmark';
import { loadTreeConstructionTestData } from 'parse5-test-utils/dist/generate-parsing-tests.js';
import { loadSAXParserTestData } from 'parse5-test-utils/dist/load-sax-parser-test-data.js';
import { treeAdapters, WritableStreamStub, finished } from 'parse5-test-utils/dist/common.js';
import * as parse5 from '../../packages/parse5/dist/index.js';
import { ParserStream as parse5Stream } from '../../packages/parse5-parser-stream/dist/index.js';
import * as parse5Upstream from 'parse5';

const hugePagePath = new URL('../../test/data/huge-page/huge-page.html', import.meta.url);
const treeConstructionPath = new URL('../../test/data/html5lib-tests/tree-construction', import.meta.url);
const saxPath = new URL('../../test/data/sax/', import.meta.url);

//HACK: https://github.com/bestiejs/benchmark.js/issues/51
/* global workingCopy, WorkingCopyParserStream, upstreamParser, hugePage, microTests, runMicro, runPages, files */
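// Benchmark.js compiles the benchmarked functions in its own scope (see the issue above),
// so shared fixtures and helpers are attached to `global` to remain reachable.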
global.workingCopy = parse5;
global.WorkingCopyParserStream = parse5Stream;
global.upstreamParser = parse5Upstream;

// Huge page data
global.hugePage = readFileSync(hugePagePath).toString();

// Micro data
global.microTests = loadTreeConstructionTestData(treeConstructionPath, treeAdapters.default)
    .filter(
        (test) =>
            //NOTE: this test caused a stack overflow in parse5 v1.x
            test.input !== '<button><p><button>',
    )
    .map((test) => ({
        html: test.input,
        fragmentContext: test.fragmentContext,
    }));

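// Parses every micro test with the given parser; fragment tests are parsed with their fragment context.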
global.runMicro = function (parser) {
    for (const test of microTests) {
        if (test.fragmentContext) {
            parser.parseFragment(test.fragmentContext, test.html);
        } else {
            parser.parse(test.html);
        }
    }
};

// Pages data
const pages = loadSAXParserTestData().map((test) => test.src);

global.runPages = function (parser) {
    for (const page of pages) {
        parser.parse(page);
    }
};

// Stream data
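// Absolute paths to the SAX test pages, read from disk per run in the STREAM benchmark below.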
global.files = readdirSync(saxPath).map((dirName) => new URL(`${dirName}/src.html`, saxPath).pathname);

// Utils
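// Returns the ops/sec (hz) recorded for the named benchmark in a suite.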
function getHz(suite, testName) {
    for (let i = 0; i < suite.length; i++) {
        if (suite[i].name === testName) {
            return suite[i].hz;
        }
    }
}

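// Runs a two-candidate suite (working copy vs. upstream parse5) and prints their relative speed.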
function runBench({ name, workingCopyFn, upstreamFn, defer = false }) {
    const suite = new Benchmark.Suite(name);

    suite
        .add('Working copy', workingCopyFn, { defer })
        .add('Upstream', upstreamFn, { defer })
        .on('start', () => console.log(name))
        .on('cycle', (event) => console.log(String(event.target)))
        .on('complete', () => {
            const workingCopyHz = getHz(suite, 'Working copy');
            const upstreamHz = getHz(suite, 'Upstream');

            if (workingCopyHz > upstreamHz) {
                console.log(`Working copy is ${(workingCopyHz / upstreamHz).toFixed(2)}x faster.\n`);
            } else {
                console.log(`Working copy is ${(upstreamHz / workingCopyHz).toFixed(2)}x slower.\n`);
            }
        })
        .run();
}

// Benchmarks
runBench({
    name: 'parse5 regression benchmark - MICRO',
    workingCopyFn: () => runMicro(workingCopy),
    upstreamFn: () => runMicro(upstreamParser),
});

runBench({
    name: 'parse5 regression benchmark - HUGE',
    workingCopyFn: () => workingCopy.parse(hugePage),
    upstreamFn: () => upstreamParser.parse(hugePage),
});

runBench({
    name: 'parse5 regression benchmark - PAGES',
    workingCopyFn: () => runPages(workingCopy),
    upstreamFn: () => runPages(upstreamParser),
});

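// The STREAM functions are async, so they run as deferred benchmarks and signal completion via
// deferred.resolve(). The working copy streams each fixture through ParserStream, while the upstream
// run buffers each file with WritableStreamStub and then parses the collected string.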
runBench({
    name: 'parse5 regression benchmark - STREAM',
    defer: true,
    workingCopyFn: async (deferred) => {
        const parsePromises = files.map((fileName) => {
            const stream = createReadStream(fileName, 'utf8');
            const parserStream = new WorkingCopyParserStream();

            stream.pipe(parserStream);
            return finished(parserStream);
        });

        await Promise.all(parsePromises);
        deferred.resolve();
    },
    upstreamFn: async (deferred) => {
        const parsePromises = files.map(async (fileName) => {
            const stream = createReadStream(fileName, 'utf8');
            const writable = new WritableStreamStub();

            stream.pipe(writable);

            await finished(writable);

            upstreamParser.parse(writable.writtenData);
        });

        await Promise.all(parsePromises);
        deferred.resolve();
    },
});