fix(blob): replace projectSuffix with reportName (#25017)
Always ensure unique project/test ids across blobs. Show `reportName` as a label in the HTML report. References #24451.
Parent: 1383844af8
Commit: 27c15b705d
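In short: each CI job now passes a human-readable `PWTEST_BLOB_REPORT_NAME` instead of a project suffix; the blob reporter stores it as `name` in the blob metadata, and `merge-reports` salts project/test ids per blob so they never collide. A minimal sketch of the metadata side (shapes follow the `BlobReportMetadata` diff below; `metadataFromEnv` is a hypothetical helper used only for illustration):

// Sketch only: metadataFromEnv is illustrative, not part of this change.
type BlobReportMetadata = {
  version: number;
  name?: string;                               // previously projectSuffix?: string
  shard?: { total: number, current: number };
};

function metadataFromEnv(version: number, shard?: { total: number, current: number }): BlobReportMetadata {
  return {
    version,
    name: process.env.PWTEST_BLOB_REPORT_NAME, // e.g. "chromium-ubuntu-latest-node16"
    shard,
  };
}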
.github/workflows/tests_primary.yml (8 changed lines)

@@ -55,7 +55,7 @@ jobs:
 - run: npx playwright install --with-deps ${{ matrix.browser }} chromium
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test -- --project=${{ matrix.browser }}
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-${{ matrix.os }}-node${{ matrix.node-version }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-${{ matrix.os }}-node${{ matrix.node-version }}"
 - run: node tests/config/checkCoverage.js ${{ matrix.browser }}
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()

@@ -88,7 +88,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test -- --project=chromium
   env:
     PWTEST_CHANNEL: chromium-tip-of-tree
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.os }}-chromium-tip-of-tree"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.os }}-chromium-tip-of-tree"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -133,11 +133,11 @@ jobs:
 - run: npx playwright install --with-deps
 - run: npm run ttest -- --shard ${{ matrix.shard }}
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.os }}-node${{ matrix.node-version }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.os }}-node${{ matrix.node-version }}"
   if: matrix.os != 'ubuntu-latest'
 - run: xvfb-run npm run ttest -- --shard ${{ matrix.shard }}
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.os }}-node${{ matrix.node-version }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.os }}-node${{ matrix.node-version }}"
   if: matrix.os == 'ubuntu-latest'
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
.github/workflows/tests_secondary.yml (60 changed lines)

@@ -42,7 +42,7 @@ jobs:
 - run: npx playwright install --with-deps ${{ matrix.browser }} chromium
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test -- --project=${{ matrix.browser }}
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-${{ matrix.os }}"
 - run: node tests/config/checkCoverage.js ${{ matrix.browser }}
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()

@@ -75,7 +75,7 @@ jobs:
 - run: npx playwright install --with-deps ${{ matrix.browser }} chromium
 - run: npm run test -- --project=${{ matrix.browser }}
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-${{ matrix.os }}"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -107,7 +107,7 @@ jobs:
 - run: npm run test -- --project=${{ matrix.browser }}
   shell: bash
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-windows-latest"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-windows-latest"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -170,11 +170,11 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test -- --project=${{ matrix.browser }} --headed
   if: always() && startsWith(matrix.os, 'ubuntu-')
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-headed-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-headed-${{ matrix.os }}"
 - run: npm run test -- --project=${{ matrix.browser }} --headed
   if: always() && !startsWith(matrix.os, 'ubuntu-')
   env:
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-headed-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-headed-${{ matrix.os }}"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -206,7 +206,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ctest
   env:
     PWTEST_MODE: ${{ matrix.mode }}
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.mode }}"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.mode }}"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -244,7 +244,7 @@ jobs:
   env:
     PWTEST_TRACE: 1
     PWTEST_CHANNEL: ${{ matrix.channel }}
-    PWTEST_BLOB_SUFFIX: "-tracing-${{ matrix.channel || matrix.browser }}"
+    PWTEST_BLOB_REPORT_NAME: "tracing-${{ matrix.channel || matrix.browser }}"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -271,7 +271,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ctest
   env:
     PWTEST_CHANNEL: chrome
-    PWTEST_BLOB_SUFFIX: "-chrome-stable-linux"
+    PWTEST_BLOB_REPORT_NAME: "chrome-stable-linux"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -293,7 +293,7 @@ jobs:
   shell: bash
   env:
     PWTEST_CHANNEL: chrome
-    PWTEST_BLOB_SUFFIX: "-chrome-stable-windows"
+    PWTEST_BLOB_REPORT_NAME: "chrome-stable-windows"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -314,7 +314,7 @@ jobs:
 - run: npm run ctest
   env:
     PWTEST_CHANNEL: chrome
-    PWTEST_BLOB_SUFFIX: "-chrome-stable-mac"
+    PWTEST_BLOB_REPORT_NAME: "chrome-stable-mac"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -340,12 +340,12 @@ jobs:
   if: matrix.os == 'ubuntu-20.04'
   env:
     PWTEST_CHANNEL: chromium-tip-of-tree
-    PWTEST_BLOB_SUFFIX: "-tip-of-tree-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "tip-of-tree-${{ matrix.os }}"
 - run: npm run ctest
   if: matrix.os != 'ubuntu-20.04'
   env:
     PWTEST_CHANNEL: chromium-tip-of-tree
-    PWTEST_BLOB_SUFFIX: "-tip-of-tree-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "tip-of-tree-${{ matrix.os }}"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -371,12 +371,12 @@ jobs:
   if: matrix.os == 'ubuntu-latest'
   env:
     PWTEST_CHANNEL: chromium-tip-of-tree
-    PWTEST_BLOB_SUFFIX: "-tip-of-tree-headed-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "tip-of-tree-headed-${{ matrix.os }}"
 - run: npm run ctest -- --headed
   if: matrix.os != 'ubuntu-latest'
   env:
     PWTEST_CHANNEL: chromium-tip-of-tree
-    PWTEST_BLOB_SUFFIX: "-tip-of-tree-headed-${{ matrix.os }}"
+    PWTEST_BLOB_REPORT_NAME: "tip-of-tree-headed-${{ matrix.os }}"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -397,7 +397,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ftest
   env:
     PWTEST_CHANNEL: firefox-beta
-    PWTEST_BLOB_SUFFIX: "-firefox-beta-linux"
+    PWTEST_BLOB_REPORT_NAME: "firefox-beta-linux"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -419,7 +419,7 @@ jobs:
   shell: bash
   env:
     PWTEST_CHANNEL: firefox-beta
-    PWTEST_BLOB_SUFFIX: "-firefox-beta-windows"
+    PWTEST_BLOB_REPORT_NAME: "firefox-beta-windows"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -440,7 +440,7 @@ jobs:
 - run: npm run ftest
   env:
     PWTEST_CHANNEL: firefox-beta
-    PWTEST_BLOB_SUFFIX: "-firefox-beta-mac"
+    PWTEST_BLOB_REPORT_NAME: "firefox-beta-mac"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -461,7 +461,7 @@ jobs:
 - run: npm run ctest
   env:
     PWTEST_CHANNEL: msedge
-    PWTEST_BLOB_SUFFIX: "-edge-stable-mac"
+    PWTEST_BLOB_REPORT_NAME: "edge-stable-mac"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -483,7 +483,7 @@ jobs:
   shell: bash
   env:
     PWTEST_CHANNEL: msedge
-    PWTEST_BLOB_SUFFIX: "-edge-stable-windows"
+    PWTEST_BLOB_REPORT_NAME: "edge-stable-windows"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -504,7 +504,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ctest
   env:
     PWTEST_CHANNEL: msedge
-    PWTEST_BLOB_SUFFIX: "-edge-stable-linux"
+    PWTEST_BLOB_REPORT_NAME: "edge-stable-linux"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -525,7 +525,7 @@ jobs:
 - run: npm run ctest
   env:
     PWTEST_CHANNEL: msedge-beta
-    PWTEST_BLOB_SUFFIX: "-edge-beta-mac"
+    PWTEST_BLOB_REPORT_NAME: "edge-beta-mac"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -547,7 +547,7 @@ jobs:
   shell: bash
   env:
     PWTEST_CHANNEL: msedge-beta
-    PWTEST_BLOB_SUFFIX: "-edge-beta-windows"
+    PWTEST_BLOB_REPORT_NAME: "edge-beta-windows"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -568,7 +568,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ctest
   env:
     PWTEST_CHANNEL: msedge-beta
-    PWTEST_BLOB_SUFFIX: "-edge-beta-linux"
+    PWTEST_BLOB_REPORT_NAME: "edge-beta-linux"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -589,7 +589,7 @@ jobs:
 - run: npm run ctest
   env:
     PWTEST_CHANNEL: msedge-dev
-    PWTEST_BLOB_SUFFIX: "-edge-dev-mac"
+    PWTEST_BLOB_REPORT_NAME: "edge-dev-mac"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -611,7 +611,7 @@ jobs:
   shell: bash
   env:
     PWTEST_CHANNEL: msedge-dev
-    PWTEST_BLOB_SUFFIX: "-edge-dev-windows"
+    PWTEST_BLOB_REPORT_NAME: "edge-dev-windows"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -632,7 +632,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ctest
   env:
     PWTEST_CHANNEL: msedge-dev
-    PWTEST_BLOB_SUFFIX: "-edge-dev-linux"
+    PWTEST_BLOB_REPORT_NAME: "edge-dev-linux"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -653,7 +653,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run ctest
   env:
     PWTEST_CHANNEL: chrome-beta
-    PWTEST_BLOB_SUFFIX: "-chrome-beta-linux"
+    PWTEST_BLOB_REPORT_NAME: "chrome-beta-linux"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -675,7 +675,7 @@ jobs:
   shell: bash
   env:
     PWTEST_CHANNEL: chrome-beta
-    PWTEST_BLOB_SUFFIX: "-chrome-beta-windows"
+    PWTEST_BLOB_REPORT_NAME: "chrome-beta-windows"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -696,7 +696,7 @@ jobs:
 - run: npm run ctest
   env:
     PWTEST_CHANNEL: chrome-beta
-    PWTEST_BLOB_SUFFIX: "-chrome-beta-mac"
+    PWTEST_BLOB_REPORT_NAME: "chrome-beta-mac"
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
   shell: bash

@@ -731,7 +731,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test -- --project=chromium
   env:
     PLAYWRIGHT_CHROMIUM_USE_HEADLESS_NEW: 1
-    PWTEST_BLOB_SUFFIX: "-headless-new"
+    PWTEST_BLOB_REPORT_NAME: "headless-new"
 - run: node tests/config/checkCoverage.js chromium
 - run: ./utils/upload_flakiness_dashboard.sh ./test-results/report.json
   if: always()
.github/workflows/tests_service.yml (2 changed lines)

@@ -26,7 +26,7 @@ jobs:
 - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test -- --project=${{ matrix.browser }} --workers=10 --retries=0
   env:
     PWTEST_MODE: service2
-    PWTEST_BLOB_SUFFIX: "-${{ matrix.browser }}-${{ matrix.service-os }}-service"
+    PWTEST_BLOB_REPORT_NAME: "${{ matrix.browser }}-${{ matrix.service-os }}-service"
     PLAYWRIGHT_SERVICE_ACCESS_KEY: ${{ secrets.PLAYWRIGHT_SERVICE_ACCESS_KEY }}
     PLAYWRIGHT_SERVICE_URL: ${{ secrets.PLAYWRIGHT_SERVICE_URL }}
     PLAYWRIGHT_SERVICE_OS: ${{ matrix.service-os }}
@@ -108,7 +108,7 @@ export class Filter {
     if (test.outcome === 'skipped')
       status = 'skipped';
     const searchValues: SearchValues = {
-      text: (status + ' ' + test.projectName + ' ' + test.location.file + ' ' + test.path.join(' ') + ' ' + test.title).toLowerCase(),
+      text: (status + ' ' + test.projectName + ' ' + (test.reportName || '') + ' ' + test.location.file + ' ' + test.path.join(' ') + ' ' + test.title).toLowerCase(),
       project: test.projectName.toLowerCase(),
       status: status as any,
       file: test.location.file,
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+import type { TestCaseSummary } from './types';
+
 export function escapeRegExp(string: string) {
   const reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
   const reHasRegExpChar = RegExp(reRegExpChar.source);

@@ -23,9 +25,16 @@ export function escapeRegExp(string: string) {
     : (string || '');
 }
 
+export function testCaseLabels(test: TestCaseSummary): string[] {
+  const tags = matchTags(test.path.join(' ') + ' ' + test.title).sort((a, b) => a.localeCompare(b));
+  if (test.reportName)
+    tags.unshift(test.reportName);
+  return tags;
+}
+
 // match all tags in test title
-export function matchTags(title: string): string[] {
-  return title.match(/@([\S]+)/g)?.map(tag => tag.slice(1)) || [];
+function matchTags(title: string): string[] {
+  return title.match(/@([\S]+)/g) || [];
 }
 
 // hash string to integer in range [0, 6] for color index, to get same color for same tag
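For context, `testCaseLabels` above is what makes the report name show up as the first label on a test, ahead of any `@tag` labels parsed from the title. A minimal standalone sketch of that behavior (the `TestCaseSummaryLike` type and the sample input are illustrative, not part of the change):

// Sketch only: mirrors the testCaseLabels/matchTags shapes from the hunk above.
type TestCaseSummaryLike = { title: string; path: string[]; reportName?: string };

function matchTags(title: string): string[] {
  return title.match(/@([\S]+)/g) || [];
}

function testCaseLabels(test: TestCaseSummaryLike): string[] {
  const tags = matchTags(test.path.join(' ') + ' ' + test.title).sort((a, b) => a.localeCompare(b));
  if (test.reportName)
    tags.unshift(test.reportName);
  return tags;
}

// Hypothetical input: yields ['firefox-linux', '@smoke'].
console.log(testCaseLabels({ title: 'math 1 @smoke', path: [], reportName: 'firefox-linux' }));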
@@ -23,7 +23,7 @@ import { ProjectLink } from './links';
 import { statusIcon } from './statusIcon';
 import './testCaseView.css';
 import { TestResultView } from './testResultView';
-import { hashStringToInt, matchTags } from './labelUtils';
+import { hashStringToInt, testCaseLabels } from './labelUtils';
 import { msToString } from './uiUtils';
 
 export const TestCaseView: React.FC<{

@@ -37,7 +37,7 @@ export const TestCaseView: React.FC<{
   const labels = React.useMemo(() => {
     if (!test)
       return undefined;
-    return matchTags(test.path.join(' ') + ' ' + test.title).sort((a, b) => a.localeCompare(b));
+    return testCaseLabels(test);
   }, [test]);
 
   return <div className='test-case-column vbox'>

@@ -92,10 +92,10 @@ const LabelsLinkView: React.FC<React.PropsWithChildren<{
 }>> = ({ labels }) => {
   return labels.length > 0 ? (
     <>
-      {labels.map(tag => (
-        <a key={tag} style={{ textDecoration: 'none', color: 'var(--color-fg-default)' }} href={`#?q=@${tag}`} >
-          <span style={{ margin: '6px 0 0 6px', cursor: 'pointer' }} className={'label label-color-' + (hashStringToInt(tag))}>
-            {tag}
+      {labels.map(label => (
+        <a key={label} style={{ textDecoration: 'none', color: 'var(--color-fg-default)' }} href={`#?q=${label}`} >
+          <span style={{ margin: '6px 0 0 6px', cursor: 'pointer' }} className={'label label-color-' + (hashStringToInt(label))}>
+            {label.startsWith('@') ? label.slice(1) : label}
           </span>
         </a>
       ))}
@@ -23,7 +23,7 @@ import { generateTraceUrl, Link, navigate, ProjectLink } from './links';
 import { statusIcon } from './statusIcon';
 import './testFileView.css';
 import { video, image, trace } from './icons';
-import { hashStringToInt, matchTags } from './labelUtils';
+import { hashStringToInt, testCaseLabels } from './labelUtils';
 
 export const TestFileView: React.FC<React.PropsWithChildren<{
   report: HTMLReport;

@@ -32,8 +32,6 @@ export const TestFileView: React.FC<React.PropsWithChildren<{
   setFileExpanded: (fileId: string, expanded: boolean) => void;
   filter: Filter;
 }>> = ({ file, report, isFileExpanded, setFileExpanded, filter }) => {
-  const labels = React.useCallback((test: TestCaseSummary) => matchTags(test.path.join(' ') + ' ' + test?.title).sort((a, b) => a.localeCompare(b)), []);
-
   return <Chip
     expanded={isFileExpanded(file.fileId)}
     noInsets={true}

@@ -54,7 +52,7 @@ export const TestFileView: React.FC<React.PropsWithChildren<{
         </Link>
         {report.projectNames.length > 1 && !!test.projectName &&
           <ProjectLink projectNames={report.projectNames} projectName={test.projectName} />}
-        <LabelsClickView labels={labels(test)} />
+        <LabelsClickView labels={testCaseLabels(test)} />
       </span>
     </div>
     <span data-testid='test-duration' style={{ minWidth: '50px', textAlign: 'right' }}>{msToString(test.duration)}</span>

@@ -93,33 +91,34 @@ const LabelsClickView: React.FC<React.PropsWithChildren<{
   labels: string[],
 }>> = ({ labels }) => {
 
-  const onClickHandle = (e: React.MouseEvent, tag: string) => {
+  const onClickHandle = (e: React.MouseEvent, label: string) => {
     e.preventDefault();
     const searchParams = new URLSearchParams(window.location.hash.slice(1));
     let q = searchParams.get('q')?.toString() || '';
 
-    // if metaKey or ctrlKey is pressed, add tag to search query without replacing existing tags
-    // if metaKey or ctrlKey is pressed and tag is already in search query, remove tag from search query
-    if (e.metaKey || e.ctrlKey) {
-      if (!q.includes(`@${tag}`))
-        q = `${q} @${tag}`.trim();
+    // If metaKey or ctrlKey is pressed, add tag to search query without replacing existing tags.
+    // If metaKey or ctrlKey is pressed and tag is already in search query, remove tag from search query.
+    // Always toggle non-@-tag labels.
+    if (e.metaKey || e.ctrlKey || !label.startsWith('@')) {
+      if (!q.includes(label))
+        q = `${q} ${label}`.trim();
       else
-        q = q.split(' ').filter(t => t !== `@${tag}`).join(' ').trim();
-      // if metaKey or ctrlKey is not pressed, replace existing tags with new tag
+        q = q.split(' ').filter(t => t !== label).join(' ').trim();
     } else {
+      // if metaKey or ctrlKey is not pressed, replace existing tags with new tag
      if (!q.includes('@'))
-        q = `${q} @${tag}`.trim();
+        q = `${q} ${label}`.trim();
      else
-        q = (q.split(' ').filter(t => !t.startsWith('@')).join(' ').trim() + ` @${tag}`).trim();
+        q = (q.split(' ').filter(t => !t.startsWith('@')).join(' ').trim() + ` ${label}`).trim();
     }
     navigate(q ? `#?q=${q}` : '#');
   };
 
   return labels.length > 0 ? (
     <>
-      {labels.map(tag => (
-        <span key={tag} style={{ margin: '6px 0 0 6px', cursor: 'pointer' }} className={'label label-color-' + (hashStringToInt(tag))} onClick={e => onClickHandle(e, tag)}>
-          {tag}
+      {labels.map(label => (
+        <span key={label} style={{ margin: '6px 0 0 6px', cursor: 'pointer' }} className={'label label-color-' + (hashStringToInt(label))} onClick={e => onClickHandle(e, label)}>
+          {label.startsWith('@') ? label.slice(1) : label}
         </span>
       ))}
     </>
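The click handler above treats plain report-name labels and `@` tags differently: report-name labels always toggle in the search query, while `@` tags replace each other unless metaKey/ctrlKey is held. A hedged sketch of that logic as a pure function (`toggleLabelInQuery` and `additive` are illustrative names, not part of the change):

// Sketch only: a hypothetical helper mirroring onClickHandle above.
// `additive` stands in for the metaKey/ctrlKey check.
function toggleLabelInQuery(q: string, label: string, additive: boolean): string {
  if (additive || !label.startsWith('@')) {
    // Toggle: add the label if missing, otherwise remove it.
    if (!q.includes(label))
      return `${q} ${label}`.trim();
    return q.split(' ').filter(t => t !== label).join(' ').trim();
  }
  // Replace any existing @-tags with the clicked one.
  if (!q.includes('@'))
    return `${q} ${label}`.trim();
  return (q.split(' ').filter(t => !t.startsWith('@')).join(' ').trim() + ` ${label}`).trim();
}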
@@ -64,6 +64,7 @@ export type TestCaseSummary = {
   title: string;
   path: string[];
   projectName: string;
+  reportName?: string;
   location: Location;
   annotations: TestCaseAnnotation[];
   outcome: 'skipped' | 'expected' | 'unexpected' | 'flaky';
@@ -35,7 +35,7 @@ export const currentBlobReportVersion = 1;
 
 export type BlobReportMetadata = {
   version: number;
-  projectSuffix?: string;
+  name?: string;
   shard?: { total: number, current: number };
 };
 

@@ -55,7 +55,7 @@ export class BlobReporter extends TeleReporterEmitter {
   override onConfigure(config: FullConfig) {
     const metadata: BlobReportMetadata = {
       version: currentBlobReportVersion,
-      projectSuffix: process.env.PWTEST_BLOB_SUFFIX,
+      name: process.env.PWTEST_BLOB_REPORT_NAME,
       shard: config.shard ?? undefined,
     };
     this._messages.push({

@@ -103,8 +103,8 @@ export class BlobReporter extends TeleReporterEmitter {
 
   private _computeReportName(config: FullConfig) {
     let reportName = 'report';
-    if (process.env.PWTEST_BLOB_SUFFIX)
-      reportName += sanitizeForFilePath(process.env.PWTEST_BLOB_SUFFIX);
+    if (process.env.PWTEST_BLOB_REPORT_NAME)
+      reportName += sanitizeForFilePath(process.env.PWTEST_BLOB_REPORT_NAME);
     if (config.shard) {
       const paddedNumber = `${config.shard.current}`.padStart(`${config.shard.total}`.length, '0');
       reportName += `-${paddedNumber}`;
@@ -217,7 +217,7 @@ class HtmlBuilder {
     }
     const { testFile, testFileSummary } = fileEntry;
     const testEntries: TestEntry[] = [];
-    this._processJsonSuite(file, fileId, projectJson.project.name, [], testEntries);
+    this._processJsonSuite(file, fileId, projectJson.project.name, projectJson.project.metadata?.reportName, [], testEntries);
     for (const test of testEntries) {
       testFile.tests.push(test.testCase);
       testFileSummary.tests.push(test.testCaseSummary);

@@ -314,13 +314,13 @@ class HtmlBuilder {
     this._dataZipFile.addBuffer(Buffer.from(JSON.stringify(data)), fileName);
   }
 
-  private _processJsonSuite(suite: JsonSuite, fileId: string, projectName: string, path: string[], outTests: TestEntry[]) {
+  private _processJsonSuite(suite: JsonSuite, fileId: string, projectName: string, reportName: string | undefined, path: string[], outTests: TestEntry[]) {
     const newPath = [...path, suite.title];
-    suite.suites.map(s => this._processJsonSuite(s, fileId, projectName, newPath, outTests));
-    suite.tests.forEach(t => outTests.push(this._createTestEntry(t, projectName, newPath)));
+    suite.suites.map(s => this._processJsonSuite(s, fileId, projectName, reportName, newPath, outTests));
+    suite.tests.forEach(t => outTests.push(this._createTestEntry(t, projectName, reportName, newPath)));
   }
 
-  private _createTestEntry(test: JsonTestCase, projectName: string, path: string[]): TestEntry {
+  private _createTestEntry(test: JsonTestCase, projectName: string, reportName: string | undefined, path: string[]): TestEntry {
     const duration = test.results.reduce((a, r) => a + r.duration, 0);
     this._tests.set(test.testId, test);
     const location = test.location;

@@ -334,6 +334,7 @@ class HtmlBuilder {
       testId: test.testId,
       title: test.title,
       projectName,
+      reportName,
       location,
       duration,
       annotations: test.annotations,

@@ -346,6 +347,7 @@ class HtmlBuilder {
       testId: test.testId,
       title: test.title,
       projectName,
+      reportName,
       location,
       duration,
       annotations: test.annotations,
@@ -24,7 +24,7 @@ import { TeleReporterReceiver } from '../isomorphic/teleReceiver';
 import { JsonStringInternalizer, StringInternPool } from '../isomorphic/stringInternPool';
 import { createReporters } from '../runner/reporters';
 import { Multiplexer } from './multiplexer';
-import { ZipFile } from 'playwright-core/lib/utils';
+import { ZipFile, calculateSha1 } from 'playwright-core/lib/utils';
 import { currentBlobReportVersion, type BlobReportMetadata } from './blob';
 import { relativeFilePath } from '../util';
 

@@ -74,8 +74,8 @@ function parseEvents(reportJsonl: Buffer): JsonEvent[] {
   return reportJsonl.toString().split('\n').filter(line => line.length).map(line => JSON.parse(line)) as JsonEvent[];
 }
 
-async function extractAndParseReports(dir: string, shardFiles: string[], stringPool: StringInternPool, printStatus: StatusCallback): Promise<{ metadata: BlobReportMetadata, parsedEvents: JsonEvent[] }[]> {
-  const shardEvents = [];
+async function extractAndParseReports(dir: string, shardFiles: string[], stringPool: StringInternPool, printStatus: StatusCallback) {
+  const shardEvents: { file: string, metadata: BlobReportMetadata, parsedEvents: JsonEvent[] }[] = [];
   await fs.promises.mkdir(path.join(dir, 'resources'), { recursive: true });
 
   const internalizer = new JsonStringInternalizer(stringPool);

@@ -85,7 +85,7 @@ async function extractAndParseReports(dir: string, shardFiles: string[], stringP
     printStatus(`extracting: ${relativeFilePath(absolutePath)}`);
     const zipFile = new ZipFile(absolutePath);
     const entryNames = await zipFile.entries();
-    for (const entryName of entryNames) {
+    for (const entryName of entryNames.sort()) {
       const content = await zipFile.read(entryName);
       if (entryName.endsWith('.jsonl')) {
         const parsedEvents = parseEvents(content);

@@ -94,6 +94,7 @@ async function extractAndParseReports(dir: string, shardFiles: string[], stringP
         // as a post-processing step.
         internalizer.traverse(parsedEvents);
         shardEvents.push({
+          file,
           metadata: findMetadata(parsedEvents, file),
           parsedEvents
         });

@@ -117,20 +118,43 @@ function findMetadata(events: JsonEvent[], file: string): BlobReportMetadata {
 }
 
 async function mergeEvents(dir: string, shardReportFiles: string[], printStatus: StatusCallback) {
-  const stringPool = new StringInternPool();
   const events: JsonEvent[] = [];
   const configureEvents: JsonEvent[] = [];
   const beginEvents: JsonEvent[] = [];
   const endEvents: JsonEvent[] = [];
-  const shardEvents = await extractAndParseReports(dir, shardReportFiles, stringPool, printStatus);
-  shardEvents.sort((a, b) => {
+  const stringPool = new StringInternPool();
+
+  const blobs = await extractAndParseReports(dir, shardReportFiles, stringPool, printStatus);
+  // Sort by (report name; shard; file name), so that salt generation below is deterministic when:
+  // - report names are unique;
+  // - report names are missing;
+  // - report names are clashing between shards.
+  blobs.sort((a, b) => {
+    const nameA = a.metadata.name ?? '';
+    const nameB = b.metadata.name ?? '';
+    if (nameA !== nameB)
+      return nameA.localeCompare(nameB);
     const shardA = a.metadata.shard?.current ?? 0;
     const shardB = b.metadata.shard?.current ?? 0;
-    return shardA - shardB;
+    if (shardA !== shardB)
+      return shardA - shardB;
+    return a.file.localeCompare(b.file);
   });
-  const allTestIds = new Set<string>();
 
+  const saltSet = new Set<string>();
+
   printStatus(`merging events`);
-  for (const { parsedEvents } of shardEvents) {
+
+  for (const { file, parsedEvents, metadata } of blobs) {
+    // Generate unique salt for each blob.
+    const sha1 = calculateSha1(metadata.name || path.basename(file)).substring(0, 16);
+    let salt = sha1;
+    for (let i = 0; saltSet.has(salt); i++)
+      salt = sha1 + '-' + i;
+    saltSet.add(salt);
+
+    new IdsPatcher(stringPool, metadata.name, salt).patchEvents(parsedEvents);
+
     for (const event of parsedEvents) {
       if (event.method === 'onConfigure')
         configureEvents.push(event);

@@ -138,9 +162,7 @@ async function mergeEvents(dir: string, shardReportFiles: string[], printStatus:
         beginEvents.push(event);
       else if (event.method === 'onEnd')
         endEvents.push(event);
-      else if (event.method === 'onBlobReportMetadata')
-        new ProjectNamePatcher(allTestIds, stringPool, event.params.projectSuffix || '').patchEvents(parsedEvents);
-      else
+      else if (event.method !== 'onBlobReportMetadata')
         events.push(event);
     }
   }
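The loop above is the heart of the fix for clashing ids: every blob gets a salt derived from its report name (or file name), and `IdsPatcher` appends that salt to the project and test ids from that blob. A self-contained sketch of the salt derivation, assuming `calculateSha1` returns a hex SHA-1 digest like the helper imported from `playwright-core/lib/utils` (the local `createHash` implementation below is just a stand-in):

// Sketch only: uniqueSalt and `seen` are illustrative names.
import { createHash } from 'crypto';

const calculateSha1 = (value: string) => createHash('sha1').update(value).digest('hex');

function uniqueSalt(reportNameOrFile: string, seen: Set<string>): string {
  const sha1 = calculateSha1(reportNameOrFile).substring(0, 16);
  let salt = sha1;
  for (let i = 0; seen.has(salt); i++)
    salt = sha1 + '-' + i;   // two blobs with the same name still get distinct salts
  seen.add(salt);
  return salt;
}

Because the blobs were sorted by (report name, shard, file name) first, the generated salts come out the same on every merge run.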
@@ -248,13 +270,8 @@ function printStatusToStdout(message: string) {
   process.stdout.write(`${message}\n`);
 }
 
-class ProjectNamePatcher {
-  private _testIds = new Set<string>();
-
-  constructor(
-    private _allTestIds: Set<string>,
-    private _stringPool: StringInternPool,
-    private _projectNameSuffix: string) {
+class IdsPatcher {
+  constructor(private _stringPool: StringInternPool, private _reportName: string | undefined, private _salt: string) {
   }
 
   patchEvents(events: JsonEvent[]) {

@@ -275,48 +292,32 @@ class IdsPatcher {
         continue;
       }
     }
-    for (const testId of this._testIds)
-      this._allTestIds.add(testId);
   }
 
   private _onBegin(config: JsonConfig, projects: JsonProject[]) {
-    for (const project of projects)
-      project.name += this._projectNameSuffix;
     this._updateProjectIds(projects);
-    for (const project of projects)
-      project.suites.forEach(suite => this._updateTestIds(suite));
   }
 
   private _updateProjectIds(projects: JsonProject[]) {
     const usedNames = new Set<string>();
-    for (const p of projects) {
+    for (const project of projects) {
+      project.metadata = project.metadata ?? {};
+      project.metadata.reportName = this._reportName;
       for (let i = 0; i < projects.length; ++i) {
-        const candidate = p.name + (i ? i : '');
+        const candidate = (project.name + this._salt) + (i ? i : '');
         if (usedNames.has(candidate))
           continue;
-        p.id = candidate;
+        project.id = candidate;
         usedNames.add(candidate);
         break;
       }
     }
+    for (const project of projects)
+      project.suites.forEach(suite => this._updateTestIds(suite));
   }
 
   private _updateTestIds(suite: JsonSuite) {
-    suite.tests.forEach(test => {
-      test.testId = this._mapTestId(test.testId);
-      this._testIds.add(test.testId);
-    });
+    suite.tests.forEach(test => test.testId = this._mapTestId(test.testId));
     suite.suites.forEach(suite => this._updateTestIds(suite));
   }
 
   private _mapTestId(testId: string): string {
-    testId = testId + this._projectNameSuffix;
-    // Consider a setup project running on each shard. In this case we'll have
-    // the same testId (from setup project) in multiple blob reports.
-    // To avoid reporters being confused by clashing test ids, we automatically
-    // make them unique and produce a separate test from each blob.
-    while (this._allTestIds.has(testId))
-      testId = testId + '1';
-    return this._stringPool.internString(testId);
+    return this._stringPool.internString(testId + this._salt);
   }
 }
@@ -213,7 +213,7 @@ export function cleanEnv(env: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
   PW_TEST_REPORTER_WS_ENDPOINT: undefined,
   PW_TEST_SOURCE_TRANSFORM: undefined,
   PW_TEST_SOURCE_TRANSFORM_SCOPE: undefined,
-  PWTEST_BLOB_SUFFIX: undefined,
+  PWTEST_BLOB_REPORT_NAME: undefined,
   TEST_WORKER_INDEX: undefined,
   TEST_PARLLEL_INDEX: undefined,
   NODE_OPTIONS: undefined,
@@ -652,7 +652,7 @@ test('resource names should not clash between runs', async ({ runInlineTest, sho
   reportFiles.sort();
   expect(reportFiles).toEqual(['report-1.zip', 'report-2.zip']);
 
-  const { exitCode } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', 'html'] });
+  const { exitCode } = await mergeReports(reportDir, { 'PW_TEST_HTML_REPORT_OPEN': 'never' }, { additionalArgs: ['--reporter', 'html'] });
   expect(exitCode).toBe(0);
 
   await showReport();

@@ -852,7 +852,7 @@ test('onError in the report', async ({ runInlineTest, mergeReports, showReport,
   const result = await runInlineTest(files, { shard: `1/3` });
   expect(result.exitCode).toBe(1);
 
-  const { exitCode } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', 'html'] });
+  const { exitCode } = await mergeReports(reportDir, { 'PW_TEST_HTML_REPORT_OPEN': 'never' }, { additionalArgs: ['--reporter', 'html'] });
   expect(exitCode).toBe(0);
 
   await showReport();

@@ -1119,47 +1119,6 @@ test('preserve steps in html report', async ({ runInlineTest, mergeReports, show
   await expect(page.getByText('expect.toBe')).toBeVisible();
 });
 
-test('custom project suffix', async ({ runInlineTest, mergeReports }) => {
-  const reportDir = test.info().outputPath('blob-report');
-  const files = {
-    'echo-reporter.js': `
-      import fs from 'fs';
-
-      class EchoReporter {
-        onBegin(config, suite) {
-          const projects = suite.suites.map(s => s.project().name);
-          console.log('projects:' + projects);
-        }
-      }
-      module.exports = EchoReporter;
-    `,
-    'playwright.config.ts': `
-      module.exports = {
-        reporter: 'blob',
-        projects: [
-          { name: 'foo' },
-          { name: 'bar' },
-        ]
-      };
-    `,
-    'a.test.js': `
-      import { test, expect } from '@playwright/test';
-      test('math 1', async ({}) => {});
-    `,
-  };
-
-  await runInlineTest(files, { shard: `1/2` }, { PWTEST_BLOB_SUFFIX: '-suffix', PWTEST_BLOB_DO_NOT_REMOVE: '1' });
-  await runInlineTest(files, { shard: `2/2` }, { PWTEST_BLOB_SUFFIX: '-suffix', PWTEST_BLOB_DO_NOT_REMOVE: '1' });
-
-  const reportFiles = await fs.promises.readdir(reportDir);
-  reportFiles.sort();
-  expect(reportFiles).toEqual(['report-suffix-1.zip', 'report-suffix-2.zip']);
-
-  const { exitCode, output } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', test.info().outputPath('echo-reporter.js')] });
-  expect(exitCode).toBe(0);
-  expect(output).toContain(`projects:foo-suffix,bar-suffix`);
-});
-
 test('same project different suffixes', async ({ runInlineTest, mergeReports }) => {
   const files = {
     'echo-reporter.js': `

@@ -1167,9 +1126,9 @@ test('same project different suffixes', async ({ runInlineTest, mergeReports })
 
       class EchoReporter {
        onBegin(config, suite) {
-          const projects = suite.suites.map(s => s.project().name);
-          projects.sort();
-          console.log('projects:' + projects);
+          const projects = suite.suites.map(s => s.project()).sort((a, b) => a.metadata.reportName.localeCompare(b.metadata.reportName));
+          console.log('projectNames: ' + projects.map(p => p.name));
+          console.log('reportNames: ' + projects.map(p => p.metadata.reportName));
        }
       }
       module.exports = EchoReporter;

@@ -1184,17 +1143,18 @@ test('same project different suffixes', async ({ runInlineTest, mergeReports })
     `,
     'a.test.js': `
       import { test, expect } from '@playwright/test';
-      test('math 1', async ({}) => {});
+      test('math 1 @smoke', async ({}) => {});
     `,
   };
 
-  await runInlineTest(files, undefined, { PWTEST_BLOB_SUFFIX: '-first' });
-  await runInlineTest(files, undefined, { PWTEST_BLOB_SUFFIX: '-second', PWTEST_BLOB_DO_NOT_REMOVE: '1' });
+  await runInlineTest(files, undefined, { PWTEST_BLOB_REPORT_NAME: 'first' });
+  await runInlineTest(files, undefined, { PWTEST_BLOB_REPORT_NAME: 'second', PWTEST_BLOB_DO_NOT_REMOVE: '1' });
 
   const reportDir = test.info().outputPath('blob-report');
   const { exitCode, output } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', test.info().outputPath('echo-reporter.js')] });
   expect(exitCode).toBe(0);
-  expect(output).toContain(`projects:foo-first,foo-second`);
+  expect(output).toContain(`projectNames: foo,foo`);
+  expect(output).toContain(`reportNames: first,second`);
 });
 
 test('no reports error', async ({ runInlineTest, mergeReports }) => {

@@ -1300,7 +1260,7 @@ test('merge-reports should throw if report version is from the future', async ({
   zipFile.end();
   await zipFinishPromise;
 
-  const { exitCode, output } = await mergeReports(reportDir, {}, { additionalArgs: ['--reporter', 'html'] });
+  const { exitCode, output } = await mergeReports(reportDir, { 'PW_TEST_HTML_REPORT_OPEN': 'never' }, { additionalArgs: ['--reporter', 'html'] });
   expect(exitCode).toBe(1);
   expect(output).toContain(`Error: Blob report report-2.zip was created with a newer version of Playwright.`);