
Commit 5363ba3

paulirish authored and brendankenny committed
add streaming json parser
1 parent 7d5e06e · commit 5363ba3

File tree

lighthouse-core/gather/driver.js
lighthouse-core/lib/traces/trace-parser.js
lighthouse-core/lib/web-inspector.js
lighthouse-core/test/lib/traces/trace-parser-test.js

4 files changed: +170 -4 lines changed


lighthouse-core/gather/driver.js

Lines changed: 5 additions & 4 deletions

@@ -10,6 +10,7 @@ const emulation = require('../lib/emulation');
 const Element = require('../lib/element');
 const EventEmitter = require('events').EventEmitter;
 const URL = require('../lib/url-shim');
+const TraceParser = require('../lib/traces/trace-parser');

 const log = require('lighthouse-logger');
 const DevtoolsLog = require('./devtools-log');
@@ -25,7 +26,7 @@ const _uniq = arr => Array.from(new Set(arr));

 class Driver {
   static get MAX_WAIT_FOR_FULLY_LOADED() {
-    return 60 * 1000;
+    return 30 * 1000;
   }

   /**
@@ -801,7 +802,7 @@
   _readTraceFromStream(streamHandle) {
     return new Promise((resolve, reject) => {
       let isEOF = false;
-      let result = '';
+      const parser = new TraceParser();

       const readArguments = {
         handle: streamHandle.stream
@@ -812,11 +813,11 @@
           return;
         }

-        result += response.data;
+        parser.parseChunk(response.data);

        if (response.eof) {
          isEOF = true;
-          return resolve(JSON.parse(result));
+          return resolve(parser.getTrace());
        }

        return this.sendCommand('IO.read', readArguments).then(onChunkRead);
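
Taken together, these hunks change _readTraceFromStream() from accumulating IO.read chunks into one string (and JSON.parse-ing it at EOF) to feeding each chunk into the streaming parser. Below is a minimal sketch of the resulting method, reconstructed from the hunks above; the onChunkRead declaration and the initial IO.read call are not shown in this diff, so that wiring is assumed from context.

  _readTraceFromStream(streamHandle) {
    return new Promise((resolve, reject) => {
      let isEOF = false;
      const parser = new TraceParser();

      const readArguments = {
        handle: streamHandle.stream
      };

      // Assumed wiring (not part of this diff): keep issuing IO.read until eof,
      // handing each response chunk to the streaming parser as it arrives.
      const onChunkRead = response => {
        if (isEOF) {
          return;
        }

        parser.parseChunk(response.data);

        if (response.eof) {
          isEOF = true;
          return resolve(parser.getTrace());
        }

        return this.sendCommand('IO.read', readArguments).then(onChunkRead);
      };

      this.sendCommand('IO.read', readArguments).then(onChunkRead).catch(reject);
    });
  }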
lighthouse-core/lib/traces/trace-parser.js

Lines changed: 70 additions & 0 deletions

@@ -0,0 +1,70 @@
+/**
+ * @license Copyright 2017 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+ */
+'use strict';
+
+const WebInspector = require('../web-inspector');
+
+/**
+ * Traces > 256MB hit limits in V8, so TraceParser will parse the trace events stream as it's
+ * received. We use DevTools' TimelineLoader for the heavy lifting, as it has a fast trace-specific
+ * streaming JSON parser.
+ * The resulting trace doesn't include the "metadata" property, as it's excluded via DevTools'
+ * implementation.
+ */
+class TraceParser {
+  constructor() {
+    this.traceEvents = [];
+
+    this.tracingModel = {
+      reset: _ => this._reset(),
+      addEvents: evts => this._addEvents(evts),
+    };
+
+    const delegateMock = {
+      loadingProgress: _ => {},
+      loadingStarted: _ => {},
+      loadingComplete: success => {
+        if (!success) throw new Error('Parsing problem');
+      }
+    };
+    this.loader = new WebInspector.TimelineLoader(this.tracingModel, delegateMock);
+  }
+
+  /**
+   * Reset the trace events array
+   */
+  _reset() {
+    this.traceEvents = [];
+  }
+
+  /**
+   * Adds parsed trace events to array
+   * @param {!Array<!TraceEvent>} evts
+   */
+  _addEvents(evts) {
+    this.traceEvents.push(...evts);
+  }
+
+  /**
+   * Receive chunk of streamed trace
+   * @param {string} data
+   */
+  parseChunk(data) {
+    this.loader.write(data);
+  }
+
+  /**
+   * Returns entire trace
+   * @return {{traceEvents: !Array<!TraceEvent>}}
+   */
+  getTrace() {
+    return {
+      traceEvents: this.traceEvents
+    };
+  }
+}
+
+module.exports = TraceParser;
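
For reference, a minimal usage sketch of the class above, with an illustrative require path and two hand-written chunks that are not part of this commit: chunks may split the serialized JSON at arbitrary points, and getTrace() returns the reassembled {traceEvents} object once the last chunk has been written.

// Hypothetical caller; the relative path depends on where this runs from.
const TraceParser = require('./trace-parser');

const parser = new TraceParser();

// The chunk boundary below deliberately splits an event object in two;
// DevTools' TimelineLoader buffers across chunk boundaries.
parser.parseChunk('{"traceEvents": [{"name": "Program", "cat": "toplevel", "ph": "B",');
parser.parseChunk(' "pid": 1, "tid": 1, "ts": 100, "args": {}}]}');

const trace = parser.getTrace();
console.log(trace.traceEvents.length); // expected: 1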

lighthouse-core/lib/web-inspector.js

Lines changed: 5 additions & 0 deletions

@@ -176,6 +176,11 @@ module.exports = (function() {
   require('chrome-devtools-frontend/front_end/timeline_model/TimelineModel.js');
   require('chrome-devtools-frontend/front_end/ui_lazy/SortableDataGrid.js');
   require('chrome-devtools-frontend/front_end/timeline/TimelineTreeView.js');
+
+  // used for streaming json parsing
+  require('chrome-devtools-frontend/front_end/common/TextUtils.js');
+  require('chrome-devtools-frontend/front_end/timeline/TimelineLoader.js');
+
   require('chrome-devtools-frontend/front_end/timeline_model/TimelineProfileTree.js');
   require('chrome-devtools-frontend/front_end/components_lazy/FilmStripModel.js');
   require('chrome-devtools-frontend/front_end/timeline_model/TimelineIRModel.js');
lighthouse-core/test/lib/traces/trace-parser-test.js

Lines changed: 90 additions & 0 deletions

@@ -0,0 +1,90 @@
+/**
+ * @license Copyright 2017 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+ */
+'use strict';
+
+const TraceParser = require('../../../lib/traces/trace-parser');
+const fs = require('fs');
+const assert = require('assert');
+
+
+/* eslint-env mocha */
+describe('traceParser parser', () => {
+  it('returns preact trace data the same as JSON.parse', (done) => {
+    const filename = '../../fixtures/traces/progressive-app-m60.json';
+    const readStream = fs.createReadStream(__dirname + '/' + filename, {
+      encoding: 'utf-8',
+      // devtools sends traces in 10mb chunks, but this trace is 12MB so we'll do a few chunks
+      highWaterMark: 4 * 1024 * 1024
+    });
+    const parser = new TraceParser();
+
+    readStream.on('data', (chunk) => {
+      parser.parseChunk(chunk);
+    });
+    readStream.on('end', () => {
+      const streamedTrace = parser.getTrace();
+      const readTrace = require(filename);
+
+      assert.equal(streamedTrace.traceEvents.length, readTrace.traceEvents.length);
+      assert.deepStrictEqual(streamedTrace.traceEvents, readTrace.traceEvents);
+
+      done();
+    });
+  });
+
+
+  it('parses a trace > 256mb (slow)', () => {
+    const parser = new TraceParser();
+    let bytesRead = 0;
+    // FYI: this trace doesn't have a traceEvents property ;)
+    const events = require('../../fixtures/traces/devtools-homepage-w-screenshots-trace.json');
+
+    /**
+     * This function will synthesize a trace that's over 256 MB. To do that, we'll take an existing
+     * trace and repeat the same events again and again until we've gone over 256 MB.
+     * Note: We repeat all but the last event, as it's the CpuProfile event, and it triggers
+     * specific handling in the devtools streaming parser.
+     * Once we reach > 256 MB, we add in the CpuProfile event.
+     */
+    function buildAndParse256mbTrace() {
+      const stripOuterBrackets = str => str.replace(/^\[/, '').replace(/\]$/, '');
+      const partialEventsStr = events => stripOuterBrackets(JSON.stringify(events));
+      const traceEventsStr = partialEventsStr(events.slice(0, events.length-2)) + ',';
+
+      // read the trace intro
+      parser.parseChunk(`{"traceEvents": [${traceEventsStr}`);
+      bytesRead += traceEventsStr.length;
+
+      // just keep reading until we've gone over 256 MB
+      // 256 MB is hard limit of a string in v8
+      // https://mobile.twitter.com/bmeurer/status/879276976523157505
+      while (bytesRead <= (Math.pow(2, 28)) - 16) {
+        parser.parseChunk(traceEventsStr);
+        bytesRead += traceEventsStr.length;
+      }
+
+      // the CPU Profiler event is last (and big), inject it just once
+      const lastEventStr = partialEventsStr(events.slice(-1));
+      parser.parseChunk(lastEventStr + ']}');
+      bytesRead += lastEventStr.length;
+    }
+
+    buildAndParse256mbTrace();
+    const streamedTrace = parser.getTrace();
+
+    assert.ok(bytesRead > 256 * 1024 * 1024, `${bytesRead} bytes read`);
+    assert.strictEqual(bytesRead, 270179102, `${bytesRead} bytes read`);
+
+    // if > 256 MB are read we should have ~480,000 trace events
+    assert.ok(streamedTrace.traceEvents.length > 400 * 1000, 'not >400,000 trace events');
+    assert.ok(streamedTrace.traceEvents.length > events.length * 5, 'not way more trace events');
+    assert.strictEqual(streamedTrace.traceEvents.length, 480151);
+
+    assert.deepStrictEqual(
+      streamedTrace.traceEvents[events.length - 2],
+      events[0]);
+  });
+});
