it('should describe itself based on the perfLogFeatures', () => {
  expect(sortedKeys(createMetric([[]], new PerfLogFeatures()).describe()))
      .toEqual(['pureScriptTime', 'scriptTime']);
  expect(sortedKeys(
             createMetric([[]], new PerfLogFeatures({render: true, gc: false})).describe()))
      .toEqual(['pureScriptTime', 'renderTime', 'scriptTime']);
  expect(sortedKeys(createMetric([[]], null).describe()))
      .toEqual(
          ['gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime', 'renderTime', 'scriptTime']);
  expect(sortedKeys(
             createMetric([[]], new PerfLogFeatures({render: true, gc: true}), {forceGc: true})
                 .describe()))
      .toEqual([
        'forcedGcAmount', 'forcedGcTime', 'gcAmount', 'gcTime', 'majorGcTime', 'pureScriptTime',
        'renderTime', 'scriptTime'
      ]);
  expect(sortedKeys(createMetric([[]], new PerfLogFeatures({userTiming: true}),
                                 {receivedData: true, requestCount: true})
                        .describe()))
      .toEqual(['pureScriptTime', 'receivedData', 'requestCount', 'scriptTime']);
});
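// A minimal sketch of the `sortedKeys` helper the assertions above assume:
// it extracts the metric names from a describe() map and sorts them so the
// expected arrays are order-independent. (Behavior inferred from usage; the
// spec file's own helper may be written against the collection facade.)
function sortedKeys(stringMap: {[key: string]: any}): string[] {
  return Object.keys(stringMap).sort();
}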
it('should return the last sampleSize runs when it has at least the given size', () => {
  createValidator(2);
  var sample = [mv(0, 0, {'a': 1}), mv(1, 1, {'b': 2}), mv(2, 2, {'c': 3})];
  expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
      .toEqual(ListWrapper.slice(sample, 0, 2));
  expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
});
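// A plausible shape for the `mv` sample factory used in these tests, assuming
// it wraps a run index, a millisecond timestamp, and a metric map into a
// MeasureValues record (inferred from usage, not copied from the spec file;
// MeasureValues and DateWrapper come from the benchpress/facade imports at
// the top of the spec):
function mv(runIndex: number, time: number, values: {[key: string]: number}) {
  return new MeasureValues(runIndex, DateWrapper.fromMillis(time), values);
}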
it('should return the last sampleSize runs when the regression slope is >0', () => {
  createValidator({size: 2, metric: 'script'});
  var sample = [mv(0, 0, {'script': 1}), mv(1, 1, {'script': 2}), mv(2, 2, {'script': 3})];
  expect(validator.validate(ListWrapper.slice(sample, 0, 2)))
      .toEqual(ListWrapper.slice(sample, 0, 2));
  expect(validator.validate(sample)).toEqual(ListWrapper.slice(sample, 1, 3));
});
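// The validator's accept/reject decision presumably reduces to an ordinary
// least-squares slope over (x = run index, y = metric value); a self-contained
// sketch of that computation (the Statistic helper benchpress actually calls
// may differ in detail):
function calculateRegressionSlope(xValues: number[], yValues: number[]): number {
  var xMean = xValues.reduce((a, b) => a + b, 0) / xValues.length;
  var yMean = yValues.reduce((a, b) => a + b, 0) / yValues.length;
  var dividend = 0;
  var divisor = 0;
  for (var i = 0; i < xValues.length; i++) {
    dividend += (xValues[i] - xMean) * (yValues[i] - yMean);
    divisor += (xValues[i] - xMean) * (xValues[i] - xMean);
  }
  return dividend / divisor;
}
// For the sample above ('script': 1, 2, 3 over runs 0..2) the slope is 1,
// which is > 0, so the validator returns the last `size` runs as a valid sample.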
.then((events) => {
  expect(events.length).toEqual(2);
  expect(events[0])
      .toEqual(normEvents.start('gc', 1.0, {'usedHeapSize': 1000, 'majorGc': true}));
  expect(events[1])
      .toEqual(normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': true}));
  async.done();
});
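// The `normEvents` factory used in these callbacks builds normalized trace
// records; a minimal sketch of what its start/end/instant methods plausibly
// emit, modeled on the Chrome trace event phases ('B' = begin, 'E' = end,
// 'I' = instant). Field names here are an assumption, not copied from the
// factory class this spec actually uses.
class TraceEventFactorySketch {
  constructor(private cat: string, private pid: string) {}
  private create(ph: string, name: string, time: number, args: any = null) {
    var res: {[key: string]: any} =
        {'cat': this.cat, 'ph': ph, 'name': name, 'ts': time, 'pid': this.pid};
    if (args != null) res['args'] = args;
    return res;
  }
  start(name: string, time: number, args: any = null) {
    return this.create('B', name, time, args);
  }
  end(name: string, time: number, args: any = null) {
    return this.create('E', name, time, args);
  }
  instant(name: string, time: number, args: any = null) {
    return this.create('I', name, time, args);
  }
}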
it('should describe itself if frame capture is requested and not available', () => {
  var description =
      createMetric([[]], new PerfLogFeatures({frameCapture: false}), {captureFrames: true})
          .describe();
  expect(description['frameTime.mean']).toContain('WARNING');
  expect(description['frameTime.best']).toContain('WARNING');
  expect(description['frameTime.worst']).toContain('WARNING');
  expect(description['frameTime.smooth']).toContain('WARNING');
});
.then((data) => {
  expect(commandLog).toEqual([
    ['timeBegin', 'benchpress0'],
    ['timeEnd', 'benchpress0', null],
    'readPerfLog'
  ]);
  expect(data['scriptTime']).toBe(2);
  async.done();
});
.then((events) => {
  expect(events).toEqual([
    normEvents.start('gc', 1.0, {'usedHeapSize': 1000}),
    normEvents.end('gc', 2.0, {'usedHeapSize': 0, 'majorGc': true}),
  ]);
  async.done();
});
.then((events) => {
  expect(events).toEqual([
    normEvents.instant('sendRequest', 1.234, {'url': 'http://here', 'method': 'GET'})
  ]);
  async.done();
});
PromiseWrapper.catchError(aggregate([eventFactory.markStart('frameCapture', 3)]), (err) => {
  expect(() => { throw err; })
      .toThrowError(
          'found start event for frame capture, but frame capture was not requested in benchpress');
  async.done();
  return null;
});
.then((data) => {
  expect(commandLog).toEqual([
    ['gc'],
    ['timeBegin', 'benchpress0'],
    ['timeEnd', 'benchpress0', 'benchpress1'],
    'readPerfLog',
    ['gc'],
    ['timeEnd', 'benchpress1', null],
    'readPerfLog'
  ]);
  expect(data['forcedGcTime']).toBe(3);
  expect(data['forcedGcAmount']).toBe(1.5);
  async.done();
});