chore: merge traces into a single zip file (#21242)
This commit is contained in:
Parent: cb5a4845b3
Commit: 81bd637d94
@@ -14,8 +14,12 @@
  * limitations under the License.
  */

+import fs from 'fs';
+import type EventEmitter from 'events';
 import type { ClientSideCallMetadata } from '@protocol/channels';
 import type { SerializedClientSideCallMetadata, SerializedStack, SerializedStackFrame } from '@trace/traceUtils';
+import { yazl, yauzl } from '../zipBundle';
+import { ManualPromise } from './manualPromise';

 export function serializeClientSideCallMetadata(metadatas: ClientSideCallMetadata[]): SerializedClientSideCallMetadata {
   const fileNames = new Map<string, number>();
@@ -37,3 +41,54 @@ export function serializeClientSideCallMetadata(metadatas: ClientSideCallMetadat
   }
   return { files: [...fileNames.keys()], stacks };
 }
+
+export async function mergeTraceFiles(fileName: string, temporaryTraceFiles: string[]) {
+  if (temporaryTraceFiles.length === 1) {
+    await fs.promises.rename(temporaryTraceFiles[0], fileName);
+    return;
+  }
+
+  const mergePromise = new ManualPromise();
+  const zipFile = new yazl.ZipFile();
+  const entryNames = new Set<string>();
+  (zipFile as any as EventEmitter).on('error', error => mergePromise.reject(error));
+
+  for (let i = 0; i < temporaryTraceFiles.length; ++i) {
+    const tempFile = temporaryTraceFiles[i];
+    const promise = new ManualPromise<void>();
+    yauzl.open(tempFile, (err, inZipFile) => {
+      if (err) {
+        promise.reject(err);
+        return;
+      }
+      let pendingEntries = inZipFile.entryCount;
+      inZipFile.on('entry', entry => {
+        let entryName = entry.fileName;
+        if (entry.fileName.startsWith('trace.'))
+          entryName = i + '-' + entry.fileName;
+        inZipFile.openReadStream(entry, (err, readStream) => {
+          if (err) {
+            promise.reject(err);
+            return;
+          }
+          if (!entryNames.has(entryName)) {
+            entryNames.add(entryName);
+            zipFile.addReadStream(readStream!, entryName);
+          }
+          if (--pendingEntries === 0)
+            promise.resolve();
+        });
+      });
+    });
+    await promise;
+  }
+
+  zipFile.end(undefined, () => {
+    zipFile.outputStream.pipe(fs.createWriteStream(fileName)).on('close', () => {
+      Promise.all(temporaryTraceFiles.map(tempFile => fs.promises.unlink(tempFile))).then(() => {
+        mergePromise.resolve();
+      });
+    });
+  });
+  await mergePromise;
+}

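Note on the helper added above: mergeTraceFiles streams every entry of each temporary per-context zip into one output zip, prefixes entries named `trace.*` with the ordinal of their source file, skips entry names it has already written (so shared blobs such as `resources/...` are stored only once), and unlinks the temporary files once the merged zip is flushed. A minimal usage sketch — the paths below are hypothetical, only the function and its import location come from this change:

    import { mergeTraceFiles } from 'playwright-core/lib/utils';

    // Two per-context traces recorded during one test (hypothetical temp paths).
    const temporaryTraceFiles = ['/tmp/ctx0-trace.zip', '/tmp/ctx1-trace.zip'];

    // Writes a single trace.zip containing 0-trace.trace, 0-trace.network, 1-trace.trace, ...
    // plus de-duplicated resource entries, then deletes the temporary zips.
    await mergeTraceFiles('/tmp/trace.zip', temporaryTraceFiles);
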
@@ -18,7 +18,7 @@ import * as fs from 'fs';
 import * as path from 'path';
 import type { APIRequestContext, BrowserContext, BrowserContextOptions, LaunchOptions, Page, Tracing, Video } from 'playwright-core';
 import * as playwrightLibrary from 'playwright-core';
-import { createGuid, debugMode, removeFolders, addInternalStackPrefix } from 'playwright-core/lib/utils';
+import { createGuid, debugMode, removeFolders, addInternalStackPrefix, mergeTraceFiles } from 'playwright-core/lib/utils';
 import type { Fixtures, PlaywrightTestArgs, PlaywrightTestOptions, PlaywrightWorkerArgs, PlaywrightWorkerOptions, ScreenshotMode, TestInfo, TestType, TraceMode, VideoMode } from '../types/test';
 import type { TestInfoImpl } from './worker/testInfo';
 import { rootTestType } from './common/testType';
@@ -428,15 +428,11 @@

     // 6. Either remove or attach temporary traces and screenshots for contexts closed
     // before the test has finished.
-    await Promise.all(temporaryTraceFiles.map(async (file, i) => {
-      if (preserveTrace) {
-        const tracePath = testInfo.outputPath(`trace${i ? '-' + i : ''}.zip`);
-        await fs.promises.rename(file, tracePath).catch(() => {});
-        testInfo.attachments.push({ name: 'trace', path: tracePath, contentType: 'application/zip' });
-      } else {
-        await fs.promises.unlink(file).catch(() => {});
-      }
-    }));
+    if (preserveTrace && temporaryTraceFiles.length) {
+      const tracePath = testInfo.outputPath(`trace.zip`);
+      await mergeTraceFiles(tracePath, temporaryTraceFiles);
+      testInfo.attachments.push({ name: 'trace', path: tracePath, contentType: 'application/zip' });
+    }
     await Promise.all(temporaryScreenshots.map(async file => {
       if (captureScreenshots)
         await fs.promises.rename(file, addScreenshotAttachment()).catch(() => {});

@@ -76,12 +76,12 @@ async function doFetch(event: FetchEvent): Promise<Response> {
   const traceUrl = url.searchParams.get('trace')!;
   const { snapshotServer } = loadedTraces.get(traceUrl) || {};

-  if (relativePath === '/context') {
+  if (relativePath === '/contexts') {
     try {
       const traceModel = await loadTrace(traceUrl, url.searchParams.get('traceFileName'), event.clientId, (done: number, total: number) => {
         client.postMessage({ method: 'progress', params: { done, total } });
       });
-      return new Response(JSON.stringify(traceModel!.contextEntry), {
+      return new Response(JSON.stringify(traceModel!.contextEntries), {
         status: 200,
         headers: { 'Content-Type': 'application/json' }
       });

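With the route renamed from `context` to `contexts`, the service worker now responds with an array of context entries (one per trace merged into the zip) rather than a single entry. A small sketch of how a client consumes the new shape, mirroring the WorkbenchLoader change later in this diff; the surrounding variables are assumptions:

    const params = new URLSearchParams();
    params.set('trace', traceUrl);
    // The `contexts` endpoint returns ContextEntry[] instead of a single ContextEntry.
    const entries: ContextEntry[] = await (await fetch(`contexts?${params.toString()}`)).json();
    contextEntries.push(...entries);
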
@@ -27,7 +27,7 @@ import { BaseSnapshotStorage } from './snapshotStorage';
 const zipjs = zipImport as typeof zip;

 export class TraceModel {
-  contextEntry: ContextEntry;
+  contextEntries: ContextEntry[] = [];
   pageEntries = new Map<string, PageEntry>();
   private _snapshotStorage: PersistentSnapshotStorage | undefined;
   private _entries = new Map<string, zip.Entry>();
@@ -35,7 +35,6 @@ export class TraceModel {
   private _zipReader: zip.ZipReader | undefined;

   constructor() {
-    this.contextEntry = createEmptyContext();
   }

   private _formatUrl(trace: string) {
@@ -47,50 +46,54 @@
   }

   async load(traceURL: string, progress: (done: number, total: number) => void) {
-    this.contextEntry.traceUrl = traceURL;
     this._zipReader = new zipjs.ZipReader( // @ts-ignore
         new zipjs.HttpReader(this._formatUrl(traceURL), { mode: 'cors', preventHeadRequest: true }),
         { useWebWorkers: false }) as zip.ZipReader;
-    let traceEntry: zip.Entry | undefined;
-    let networkEntry: zip.Entry | undefined;
-    let stacksEntry: zip.Entry | undefined;

+    const ordinals: string[] = [];
+    let hasSource = false;
     for (const entry of await this._zipReader.getEntries({ onprogress: progress })) {
-      if (entry.filename.endsWith('.trace'))
-        traceEntry = entry;
-      if (entry.filename.endsWith('.network'))
-        networkEntry = entry;
-      if (entry.filename.endsWith('.stacks'))
-        stacksEntry = entry;
+      const match = entry.filename.match(/([\d]+-)?trace\.trace/);
+      if (match)
+        ordinals.push(match[1] || '');
       if (entry.filename.includes('src@'))
-        this.contextEntry.hasSource = true;
+        hasSource = true;
       this._entries.set(entry.filename, entry);
     }
-    if (!traceEntry)
+    if (!ordinals.length)
       throw new Error('Cannot find .trace file');

     this._snapshotStorage = new PersistentSnapshotStorage(this._entries);

-    const traceWriter = new zipjs.TextWriter() as zip.TextWriter;
-    await traceEntry.getData!(traceWriter);
-    for (const line of (await traceWriter.getData()).split('\n'))
-      this.appendEvent(line);
+    for (const ordinal of ordinals) {
+      const contextEntry = createEmptyContext();
+      contextEntry.traceUrl = traceURL;
+      contextEntry.hasSource = hasSource;
+
+      const traceWriter = new zipjs.TextWriter() as zip.TextWriter;
+      const traceEntry = this._entries.get(ordinal + 'trace.trace')!;
+      await traceEntry!.getData!(traceWriter);
+      for (const line of (await traceWriter.getData()).split('\n'))
+        this.appendEvent(contextEntry, line);

-    if (networkEntry) {
       const networkWriter = new zipjs.TextWriter();
-      await networkEntry.getData!(networkWriter);
+      const networkEntry = this._entries.get(ordinal + 'trace.network')!;
+      await networkEntry?.getData?.(networkWriter);
       for (const line of (await networkWriter.getData()).split('\n'))
-        this.appendEvent(line);
-    }
+        this.appendEvent(contextEntry, line);

-    if (stacksEntry) {
-      const writer = new zipjs.TextWriter();
-      await stacksEntry.getData!(writer);
-      const metadataMap = parseClientSideCallMetadata(JSON.parse(await writer.getData()));
-      for (const action of this.contextEntry.actions)
-        action.stack = action.stack || metadataMap.get(action.callId);
-    }
+      const stacksWriter = new zipjs.TextWriter();
+      const stacksEntry = this._entries.get(ordinal + 'trace.stacks');
+      if (stacksEntry) {
+        await stacksEntry!.getData!(stacksWriter);
+        const stacks = parseClientSideCallMetadata(JSON.parse(await stacksWriter.getData()));
+        for (const action of contextEntry.actions)
+          action.stack = action.stack || stacks.get(action.callId);
+      }

-    this._build();
+      contextEntry.actions.sort((a1, a2) => a1.startTime - a2.startTime);
+      this.contextEntries.push(contextEntry);
+    }
   }

   async hasEntry(filename: string): Promise<boolean> {
@@ -116,24 +119,19 @@
     return this._snapshotStorage!;
   }

-  private _build() {
-    this.contextEntry!.actions.sort((a1, a2) => a1.startTime - a2.startTime);
-    this.contextEntry!.resources = this._snapshotStorage!.resources();
-  }
-
-  private _pageEntry(pageId: string): PageEntry {
+  private _pageEntry(contextEntry: ContextEntry, pageId: string): PageEntry {
     let pageEntry = this.pageEntries.get(pageId);
     if (!pageEntry) {
       pageEntry = {
         screencastFrames: [],
       };
       this.pageEntries.set(pageId, pageEntry);
-      this.contextEntry.pages.push(pageEntry);
+      contextEntry.pages.push(pageEntry);
     }
     return pageEntry;
   }

-  appendEvent(line: string) {
+  appendEvent(contextEntry: ContextEntry, line: string) {
     if (!line)
       return;
     const event = this._modernize(JSON.parse(line));
@@ -141,49 +139,50 @@
       return;
     switch (event.type) {
       case 'context-options': {
-        this.contextEntry.browserName = event.browserName;
-        this.contextEntry.title = event.title;
-        this.contextEntry.platform = event.platform;
-        this.contextEntry.wallTime = event.wallTime;
-        this.contextEntry.sdkLanguage = event.sdkLanguage;
-        this.contextEntry.options = event.options;
-        this.contextEntry.testIdAttributeName = event.testIdAttributeName;
+        contextEntry.browserName = event.browserName;
+        contextEntry.title = event.title;
+        contextEntry.platform = event.platform;
+        contextEntry.wallTime = event.wallTime;
+        contextEntry.sdkLanguage = event.sdkLanguage;
+        contextEntry.options = event.options;
+        contextEntry.testIdAttributeName = event.testIdAttributeName;
         break;
       }
       case 'screencast-frame': {
-        this._pageEntry(event.pageId).screencastFrames.push(event);
+        this._pageEntry(contextEntry, event.pageId).screencastFrames.push(event);
         break;
       }
       case 'action': {
-        this.contextEntry!.actions.push(event);
+        contextEntry!.actions.push(event);
         break;
       }
       case 'event': {
-        this.contextEntry!.events.push(event);
+        contextEntry!.events.push(event);
         break;
       }
       case 'object': {
-        this.contextEntry!.initializers[event.guid] = event.initializer;
+        contextEntry!.initializers[event.guid] = event.initializer;
         break;
       }
       case 'resource-snapshot':
         this._snapshotStorage!.addResource(event.snapshot);
+        contextEntry.resources.push(event.snapshot);
         break;
       case 'frame-snapshot':
         this._snapshotStorage!.addFrameSnapshot(event.snapshot);
         break;
     }
     if (event.type === 'action') {
-      this.contextEntry!.startTime = Math.min(this.contextEntry!.startTime, event.startTime);
-      this.contextEntry!.endTime = Math.max(this.contextEntry!.endTime, event.endTime);
+      contextEntry.startTime = Math.min(contextEntry.startTime, event.startTime);
+      contextEntry.endTime = Math.max(contextEntry.endTime, event.endTime);
     }
     if (event.type === 'event') {
-      this.contextEntry!.startTime = Math.min(this.contextEntry!.startTime, event.time);
-      this.contextEntry!.endTime = Math.max(this.contextEntry!.endTime, event.time);
+      contextEntry.startTime = Math.min(contextEntry.startTime, event.time);
+      contextEntry.endTime = Math.max(contextEntry.endTime, event.time);
     }
     if (event.type === 'screencast-frame') {
-      this.contextEntry!.startTime = Math.min(this.contextEntry!.startTime, event.timestamp);
-      this.contextEntry!.endTime = Math.max(this.contextEntry!.endTime, event.timestamp);
+      contextEntry.startTime = Math.min(contextEntry.startTime, event.timestamp);
+      contextEntry.endTime = Math.max(contextEntry.endTime, event.timestamp);
     }
   }

@@ -206,7 +205,7 @@
   _modernize_1_to_2(event: any): any {
     if (event.type === 'frame-snapshot' && event.snapshot.isMainFrame) {
       // Old versions had completely wrong viewport.
-      event.snapshot.viewport = this.contextEntry.options.viewport || { width: 1280, height: 720 };
+      event.snapshot.viewport = this.contextEntries[0]?.options?.viewport || { width: 1280, height: 720 };
     }
     return event;
   }

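TraceModel.load above now builds one ContextEntry per ordinal prefix found in the zip instead of filling a single contextEntry. A short sketch of the naming convention it relies on — the example file names are illustrative, only the regular expression comes from the code:

    // Extracts the ordinal prefix ('', '0-', '1-', ...) from a trace entry name.
    const ordinalOf = (filename: string) => filename.match(/([\d]+-)?trace\.trace/)?.[1] || '';

    ordinalOf('trace.trace');    // ''   -> reads trace.trace, trace.network, trace.stacks
    ordinalOf('1-trace.trace');  // '1-' -> reads 1-trace.trace, 1-trace.network, 1-trace.stacks
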
@@ -56,7 +56,9 @@ export const SourceTab: React.FunctionComponent<{
     if (typeof stackInfo === 'string') {
       value = stackInfo;
     } else {
-      const filePath = stackInfo.frames[selectedFrame].file;
+      const filePath = stackInfo.frames[selectedFrame]?.file;
+      if (!filePath)
+        return '';
       if (!stackInfo.fileContent.has(filePath)) {
         const sha1 = await calculateSha1(filePath);
         stackInfo.fileContent.set(filePath, await fetch(`sha1/src@${sha1}.txt`).then(response => response.text()).catch(e => `<Unable to read "${filePath}">`));

@@ -110,14 +110,13 @@ export const WorkbenchLoader: React.FunctionComponent<{
       params.set('trace', url);
       if (uploadedTraceNames.length)
         params.set('traceFileName', uploadedTraceNames[i]);
-      const response = await fetch(`context?${params.toString()}`);
+      const response = await fetch(`contexts?${params.toString()}`);
       if (!response.ok) {
         setTraceURLs([]);
         setProcessingErrorMessage((await response.json()).error);
         return;
       }
-      const contextEntry = await response.json() as ContextEntry;
-      contextEntries.push(contextEntry);
+      contextEntries.push(...(await response.json()));
     }
     navigator.serviceWorker.removeEventListener('message', swListener);
     const model = new MultiTraceModel(contextEntries);

@@ -16,7 +16,7 @@

 import type { Frame, Page } from 'playwright-core';
 import { ZipFile } from '../../packages/playwright-core/lib/utils/zipFile';
-import type { StackFrame } from '@protocol/channels';
+import type { StackFrame } from '../../packages/protocol/src/channels';
 import { parseClientSideCallMetadata } from '../../packages/trace/src/traceUtils';
 import type { ActionTraceEvent } from '../../packages/trace/src/trace';

@@ -102,17 +102,26 @@ export async function parseTrace(file: string): Promise<{ events: any[], resourc
   zipFS.close();

   const events: any[] = [];
-  for (const line of resources.get('trace.trace')!.toString().split('\n')) {
-    if (line)
-      events.push(JSON.parse(line));
+  for (const traceFile of [...resources.keys()].filter(name => name.endsWith('.trace'))) {
+    for (const line of resources.get(traceFile)!.toString().split('\n')) {
+      if (line)
+        events.push(JSON.parse(line));
+    }
   }

-  for (const line of resources.get('trace.network')!.toString().split('\n')) {
-    if (line)
-      events.push(JSON.parse(line));
+  for (const networkFile of [...resources.keys()].filter(name => name.endsWith('.network'))) {
+    for (const line of resources.get(networkFile)!.toString().split('\n')) {
+      if (line)
+        events.push(JSON.parse(line));
+    }
   }

+  const stacks: Map<string, StackFrame[]> = new Map();
+  for (const stacksFile of [...resources.keys()].filter(name => name.endsWith('.stacks'))) {
+    for (const [key, value] of parseClientSideCallMetadata(JSON.parse(resources.get(stacksFile)!.toString())))
+      stacks.set(key, value);
+  }
+
-  const stacks = parseClientSideCallMetadata(JSON.parse(resources.get('trace.stacks')!.toString()));
   return {
     events,
     resources,

@@ -244,10 +244,8 @@ test('should work with trace: on', async ({ runInlineTest }, testInfo) => {
     'artifacts-shared-shared-passing',
     ' trace.zip',
     'artifacts-two-contexts',
-    ' trace-1.zip',
     ' trace.zip',
     'artifacts-two-contexts-failing',
-    ' trace-1.zip',
     ' trace.zip',
   ]);
 });
@@ -273,7 +271,6 @@ test('should work with trace: retain-on-failure', async ({ runInlineTest }, test
     'artifacts-shared-shared-failing',
     ' trace.zip',
     'artifacts-two-contexts-failing',
-    ' trace-1.zip',
     ' trace.zip',
   ]);
 });
@@ -299,7 +296,6 @@ test('should work with trace: on-first-retry', async ({ runInlineTest }, testInf
     'artifacts-shared-shared-failing-retry1',
     ' trace.zip',
     'artifacts-two-contexts-failing-retry1',
-    ' trace-1.zip',
     ' trace.zip',
   ]);
 });

@@ -15,7 +15,7 @@
 */

 import { test, expect } from './playwright-test-fixtures';
-const { ZipFile } = require('../../packages/playwright-core/lib/utils');
+import { parseTrace } from '../config/utils';
 import fs from 'fs';

 test('should stop tracing with trace: on-first-retry, when not retrying', async ({ runInlineTest }, testInfo) => {
@@ -84,14 +84,12 @@ test('should record api trace', async ({ runInlineTest, server }, testInfo) => {
   expect(result.passed).toBe(2);
   expect(result.failed).toBe(1);
-  // One trace file for request context and one for each APIRequestContext
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-pass', 'trace.zip'))).toBeTruthy();
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-pass', 'trace-1.zip'))).toBeTruthy();
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-api-pass', 'trace.zip'))).toBeTruthy();
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-api-pass', 'trace-1.zip'))).toBeFalsy();
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-fail', 'trace.zip'))).toBeTruthy();
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-fail', 'trace-1.zip'))).toBeTruthy();
-  // One leftover global APIRequestContext from 'api pass' test.
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-fail', 'trace-2.zip'))).toBeTruthy();
+  const trace1 = await parseTrace(testInfo.outputPath('test-results', 'a-pass', 'trace.zip'));
+  expect(trace1.actions).toEqual(['browserContext.newPage', 'page.goto', 'apiRequestContext.get']);
+  const trace2 = await parseTrace(testInfo.outputPath('test-results', 'a-api-pass', 'trace.zip'));
+  expect(trace2.actions).toEqual(['apiRequestContext.get']);
+  const trace3 = await parseTrace(testInfo.outputPath('test-results', 'a-fail', 'trace.zip'));
+  expect(trace3.actions).toEqual(['browserContext.newPage', 'page.goto', 'apiRequestContext.get']);
 });


@@ -149,8 +147,8 @@ test('should save sources when requested', async ({ runInlineTest }, testInfo) =
   `,
   }, { workers: 1 });
   expect(result.exitCode).toEqual(0);
-  const resources = await parseTrace(testInfo.outputPath('test-results', 'a-pass', 'trace.zip'));
-  expect([...resources.keys()].filter(f => f.includes('src@'))).toHaveLength(1);
+  const { resources } = await parseTrace(testInfo.outputPath('test-results', 'a-pass', 'trace.zip'));
+  expect([...resources.keys()].filter(name => name.startsWith('resources/src@'))).toHaveLength(1);
 });

 test('should not save sources when not requested', async ({ runInlineTest }, testInfo) => {
@@ -173,8 +171,8 @@ test('should not save sources when not requested', async ({ runInlineTest }, tes
   `,
   }, { workers: 1 });
   expect(result.exitCode).toEqual(0);
-  const resources = await parseTrace(testInfo.outputPath('test-results', 'a-pass', 'trace.zip'));
-  expect([...resources.keys()].filter(f => f.includes('src@'))).toHaveLength(0);
+  const { resources } = await parseTrace(testInfo.outputPath('test-results', 'a-pass', 'trace.zip'));
+  expect([...resources.keys()].filter(name => name.startsWith('resources/src@'))).toHaveLength(0);
 });

 test('should work in serial mode', async ({ runInlineTest }, testInfo) => {
@@ -226,7 +224,7 @@ test('should not override trace file in afterAll', async ({ runInlineTest, serve
       });

       // Another test in the same file to affect after hooks order.
-      test('test 2', async ({}) => {
+      test('test 2', async ({ page }) => {
       });

       test.afterAll(async ({ request }) => {
@@ -238,8 +236,10 @@ test('should not override trace file in afterAll', async ({ runInlineTest, serve
   expect(result.exitCode).toBe(1);
   expect(result.passed).toBe(1);
   expect(result.failed).toBe(1);
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-test-1', 'trace.zip'))).toBeTruthy();
-  expect(fs.existsSync(testInfo.outputPath('test-results', 'a-test-1', 'trace-1.zip'))).toBeTruthy();
+  const trace1 = await parseTrace(testInfo.outputPath('test-results', 'a-test-1', 'trace.zip'));
+  expect(trace1.actions).toEqual(['browserContext.newPage', 'page.goto', 'apiRequestContext.get']);
+  const error = await parseTrace(testInfo.outputPath('test-results', 'a-test-2', 'trace.zip')).catch(e => e);
+  expect(error).toBeTruthy();
 });

 test('should retain traces for interrupted tests', async ({ runInlineTest }, testInfo) => {
@@ -284,12 +284,3 @@ test('should respect --trace', async ({ runInlineTest }, testInfo) => {
   expect(result.passed).toBe(1);
   expect(fs.existsSync(testInfo.outputPath('test-results', 'a-test-1', 'trace.zip'))).toBeTruthy();
 });
-
-async function parseTrace(file: string): Promise<Map<string, Buffer>> {
-  const zipFS = new ZipFile(file);
-  const resources = new Map<string, Buffer>();
-  for (const entry of await zipFS.entries())
-    resources.set(entry, await zipFS.read(entry));
-  zipFS.close();
-  return resources;
-}

@@ -443,7 +443,7 @@ test('should show multi trace source', async ({ runInlineTest, page, server, sho
   await page.click('text=passes');
   // Expect one image-link to trace viewer and 2 separate download links
   await expect(page.locator('img')).toHaveCount(1);
-  await expect(page.locator('a', { hasText: 'trace' })).toHaveText(['trace-1', 'trace-2']);
+  await expect(page.locator('a', { hasText: 'trace' })).toHaveText(['trace']);

   await page.click('img');
   await page.click('.action-title >> text=page.evaluate');