This commit is contained in: screenshot/test_1
557 node_modules/playwright/lib/runner/dispatcher.js generated vendored Normal file
@@ -0,0 +1,557 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.Dispatcher = void 0;
var _utils = require("playwright-core/lib/utils");
var _rebase = require("./rebase");
var _workerHost = require("./workerHost");
var _ipc = require("../common/ipc");
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class Dispatcher {
  constructor(config, reporter, failureTracker) {
    this._workerSlots = [];
    this._queue = [];
    this._queuedOrRunningHashCount = new Map();
    this._finished = new _utils.ManualPromise();
    this._isStopped = true;
    this._config = void 0;
    this._reporter = void 0;
    this._failureTracker = void 0;
    this._extraEnvByProjectId = new Map();
    this._producedEnvByProjectId = new Map();
    this._config = config;
    this._reporter = reporter;
    this._failureTracker = failureTracker;
  }
  async _scheduleJob() {
    // 1. Find a job to run.
    if (this._isStopped || !this._queue.length) return;
    const job = this._queue[0];

    // 2. Find a worker with the same hash, or just some free worker.
    let index = this._workerSlots.findIndex(w => !w.busy && w.worker && w.worker.hash() === job.workerHash && !w.worker.didSendStop());
    if (index === -1) index = this._workerSlots.findIndex(w => !w.busy);
    // No workers available, bail out.
    if (index === -1) return;

    // 3. Claim both the job and the worker, run the job and release the worker.
    this._queue.shift();
    this._workerSlots[index].busy = true;
    await this._startJobInWorker(index, job);
    this._workerSlots[index].busy = false;

    // 4. Check the "finished" condition.
    this._checkFinished();

    // 5. We got a free worker - perhaps we can immediately start another job?
    void this._scheduleJob();
  }
  async _startJobInWorker(index, job) {
    const stopCallback = () => this.stop().catch(() => {});
    const jobDispatcher = new JobDispatcher(job, this._reporter, this._failureTracker, stopCallback);
    if (jobDispatcher.skipWholeJob()) return;
    let worker = this._workerSlots[index].worker;

    // 1. Restart the worker if it has the wrong hash or is being stopped already.
    if (worker && (worker.hash() !== job.workerHash || worker.didSendStop())) {
      await worker.stop();
      worker = undefined;
      if (this._isStopped)
        // Check stopped signal after async hop.
        return;
    }
    this._workerSlots[index].jobDispatcher = jobDispatcher;

    // 2. Start the worker if it is down.
    let startError;
    if (!worker) {
      worker = this._createWorker(job, index, (0, _ipc.serializeConfig)(this._config, true));
      this._workerSlots[index].worker = worker;
      worker.on('exit', () => this._workerSlots[index].worker = undefined);
      startError = await worker.start();
      if (this._isStopped)
        // Check stopped signal after async hop.
        return;
    }

    // 3. Run the job.
    if (startError) jobDispatcher.onExit(startError);else jobDispatcher.runInWorker(worker);
    const result = await jobDispatcher.jobResult;
    this._workerSlots[index].jobDispatcher = undefined;
    this._updateCounterForWorkerHash(job.workerHash, -1);

    // 4. When worker encounters error, we stop it and create a new one.
    // We also do not keep the worker alive if it cannot serve any more jobs.
    if (result.didFail) void worker.stop(true /* didFail */);else if (this._isWorkerRedundant(worker)) void worker.stop();

    // 5. Possibly schedule a new job with leftover tests and/or retries.
    if (!this._isStopped && result.newJob) {
      this._queue.unshift(result.newJob);
      this._updateCounterForWorkerHash(job.workerHash, +1);
    }
  }
  _checkFinished() {
    if (this._finished.isDone()) return;

    // Check that we have no more work to do.
    if (this._queue.length && !this._isStopped) return;

    // Make sure all workers have finished the current job.
    if (this._workerSlots.some(w => w.busy)) return;
    this._finished.resolve();
  }
  _isWorkerRedundant(worker) {
    let workersWithSameHash = 0;
    for (const slot of this._workerSlots) {
      if (slot.worker && !slot.worker.didSendStop() && slot.worker.hash() === worker.hash()) workersWithSameHash++;
    }
    return workersWithSameHash > this._queuedOrRunningHashCount.get(worker.hash());
  }
  _updateCounterForWorkerHash(hash, delta) {
    this._queuedOrRunningHashCount.set(hash, delta + (this._queuedOrRunningHashCount.get(hash) || 0));
  }
  async run(testGroups, extraEnvByProjectId) {
    this._extraEnvByProjectId = extraEnvByProjectId;
    this._queue = testGroups;
    for (const group of testGroups) this._updateCounterForWorkerHash(group.workerHash, +1);
    this._isStopped = false;
    this._workerSlots = [];
    // 0. Stop right away if we have reached max failures.
    if (this._failureTracker.hasReachedMaxFailures()) void this.stop();
    // 1. Allocate workers.
    for (let i = 0; i < this._config.config.workers; i++) this._workerSlots.push({
      busy: false
    });
    // 2. Schedule enough jobs.
    for (let i = 0; i < this._workerSlots.length; i++) void this._scheduleJob();
    this._checkFinished();
    // 3. More jobs are scheduled when the worker becomes free.
    // 4. Wait for all jobs to finish.
    await this._finished;
  }
  _createWorker(testGroup, parallelIndex, loaderData) {
    const projectConfig = this._config.projects.find(p => p.id === testGroup.projectId);
    const outputDir = projectConfig.project.outputDir;
    const worker = new _workerHost.WorkerHost(testGroup, parallelIndex, loaderData, this._extraEnvByProjectId.get(testGroup.projectId) || {}, outputDir);
    const handleOutput = params => {
      var _this$_workerSlots$pa;
      const chunk = chunkFromParams(params);
      if (worker.didFail()) {
        // Note: we keep reading stdio from workers that are currently stopping after failure,
        // to debug teardown issues. However, we avoid spoiling the test result from
        // the next retry.
        return {
          chunk
        };
      }
      const currentlyRunning = (_this$_workerSlots$pa = this._workerSlots[parallelIndex].jobDispatcher) === null || _this$_workerSlots$pa === void 0 ? void 0 : _this$_workerSlots$pa.currentlyRunning();
      if (!currentlyRunning) return {
        chunk
      };
      return {
        chunk,
        test: currentlyRunning.test,
        result: currentlyRunning.result
      };
    };
    worker.on('stdOut', params => {
      var _this$_reporter$onStd, _this$_reporter;
      const {
        chunk,
        test,
        result
      } = handleOutput(params);
      result === null || result === void 0 || result.stdout.push(chunk);
      (_this$_reporter$onStd = (_this$_reporter = this._reporter).onStdOut) === null || _this$_reporter$onStd === void 0 || _this$_reporter$onStd.call(_this$_reporter, chunk, test, result);
    });
    worker.on('stdErr', params => {
      var _this$_reporter$onStd2, _this$_reporter2;
      const {
        chunk,
        test,
        result
      } = handleOutput(params);
      result === null || result === void 0 || result.stderr.push(chunk);
      (_this$_reporter$onStd2 = (_this$_reporter2 = this._reporter).onStdErr) === null || _this$_reporter$onStd2 === void 0 || _this$_reporter$onStd2.call(_this$_reporter2, chunk, test, result);
    });
    worker.on('teardownErrors', params => {
      this._failureTracker.onWorkerError();
      for (const error of params.fatalErrors) {
        var _this$_reporter$onErr, _this$_reporter3;
        (_this$_reporter$onErr = (_this$_reporter3 = this._reporter).onError) === null || _this$_reporter$onErr === void 0 || _this$_reporter$onErr.call(_this$_reporter3, error);
      }
    });
    worker.on('exit', () => {
      const producedEnv = this._producedEnvByProjectId.get(testGroup.projectId) || {};
      this._producedEnvByProjectId.set(testGroup.projectId, {
        ...producedEnv,
        ...worker.producedEnv()
      });
    });
    return worker;
  }
  producedEnvByProjectId() {
    return this._producedEnvByProjectId;
  }
  async stop() {
    if (this._isStopped) return;
    this._isStopped = true;
    await Promise.all(this._workerSlots.map(({
      worker
    }) => worker === null || worker === void 0 ? void 0 : worker.stop()));
    this._checkFinished();
  }
}
exports.Dispatcher = Dispatcher;
class JobDispatcher {
  constructor(_job, _reporter, _failureTracker, _stopCallback) {
    this.jobResult = new _utils.ManualPromise();
    this._listeners = [];
    this._failedTests = new Set();
    this._failedWithNonRetriableError = new Set();
    this._remainingByTestId = new Map();
    this._dataByTestId = new Map();
    this._parallelIndex = 0;
    this._workerIndex = 0;
    this._currentlyRunning = void 0;
    this._job = _job;
    this._reporter = _reporter;
    this._failureTracker = _failureTracker;
    this._stopCallback = _stopCallback;
    this._remainingByTestId = new Map(this._job.tests.map(e => [e.id, e]));
  }
  _onTestBegin(params) {
    var _this$_reporter$onTes, _this$_reporter4;
    const test = this._remainingByTestId.get(params.testId);
    if (!test) {
      // TODO: this should never be the case, report an internal error?
      return;
    }
    const result = test._appendTestResult();
    this._dataByTestId.set(test.id, {
      test,
      result,
      steps: new Map()
    });
    result.parallelIndex = this._parallelIndex;
    result.workerIndex = this._workerIndex;
    result.startTime = new Date(params.startWallTime);
    (_this$_reporter$onTes = (_this$_reporter4 = this._reporter).onTestBegin) === null || _this$_reporter$onTes === void 0 || _this$_reporter$onTes.call(_this$_reporter4, test, result);
    this._currentlyRunning = {
      test,
      result
    };
  }
  _onTestEnd(params) {
    if (this._failureTracker.hasReachedMaxFailures()) {
      // Do not show more than one error to avoid confusion, but report
      // as interrupted to indicate that we did actually start the test.
      params.status = 'interrupted';
      params.errors = [];
    }
    const data = this._dataByTestId.get(params.testId);
    if (!data) {
      // TODO: this should never be the case, report an internal error?
      return;
    }
    this._dataByTestId.delete(params.testId);
    this._remainingByTestId.delete(params.testId);
    const {
      result,
      test
    } = data;
    result.duration = params.duration;
    result.errors = params.errors;
    result.error = result.errors[0];
    result.status = params.status;
    test.expectedStatus = params.expectedStatus;
    test.annotations = params.annotations;
    test.timeout = params.timeout;
    const isFailure = result.status !== 'skipped' && result.status !== test.expectedStatus;
    if (isFailure) this._failedTests.add(test);
    if (params.hasNonRetriableError) this._addNonretriableTestAndSerialModeParents(test);
    this._reportTestEnd(test, result);
    this._currentlyRunning = undefined;
  }
  _addNonretriableTestAndSerialModeParents(test) {
    this._failedWithNonRetriableError.add(test);
    for (let parent = test.parent; parent; parent = parent.parent) {
      if (parent._parallelMode === 'serial') this._failedWithNonRetriableError.add(parent);
    }
  }
  _onStepBegin(params) {
    var _this$_reporter$onSte, _this$_reporter5;
    const data = this._dataByTestId.get(params.testId);
    if (!data) {
      // The test has finished, but steps are still coming. Just ignore them.
      return;
    }
    const {
      result,
      steps,
      test
    } = data;
    const parentStep = params.parentStepId ? steps.get(params.parentStepId) : undefined;
    const step = {
      title: params.title,
      titlePath: () => {
        const parentPath = (parentStep === null || parentStep === void 0 ? void 0 : parentStep.titlePath()) || [];
        return [...parentPath, params.title];
      },
      parent: parentStep,
      category: params.category,
      startTime: new Date(params.wallTime),
      duration: -1,
      steps: [],
      attachments: [],
      annotations: [],
      location: params.location
    };
    steps.set(params.stepId, step);
    (parentStep || result).steps.push(step);
    (_this$_reporter$onSte = (_this$_reporter5 = this._reporter).onStepBegin) === null || _this$_reporter$onSte === void 0 || _this$_reporter$onSte.call(_this$_reporter5, test, result, step);
  }
  _onStepEnd(params) {
    var _this$_reporter$onSte2, _this$_reporter7;
    const data = this._dataByTestId.get(params.testId);
    if (!data) {
      // The test has finished, but steps are still coming. Just ignore them.
      return;
    }
    const {
      result,
      steps,
      test
    } = data;
    const step = steps.get(params.stepId);
    if (!step) {
      var _this$_reporter$onStd3, _this$_reporter6;
      (_this$_reporter$onStd3 = (_this$_reporter6 = this._reporter).onStdErr) === null || _this$_reporter$onStd3 === void 0 || _this$_reporter$onStd3.call(_this$_reporter6, 'Internal error: step end without step begin: ' + params.stepId, test, result);
      return;
    }
    step.duration = params.wallTime - step.startTime.getTime();
    if (params.error) step.error = params.error;
    if (params.suggestedRebaseline) (0, _rebase.addSuggestedRebaseline)(step.location, params.suggestedRebaseline);
    step.annotations = params.annotations;
    steps.delete(params.stepId);
    (_this$_reporter$onSte2 = (_this$_reporter7 = this._reporter).onStepEnd) === null || _this$_reporter$onSte2 === void 0 || _this$_reporter$onSte2.call(_this$_reporter7, test, result, step);
  }
  _onAttach(params) {
    const data = this._dataByTestId.get(params.testId);
    if (!data) {
      // The test has finished, but attachments are still coming. Just ignore them.
      return;
    }
    const attachment = {
      name: params.name,
      path: params.path,
      contentType: params.contentType,
      body: params.body !== undefined ? Buffer.from(params.body, 'base64') : undefined
    };
    data.result.attachments.push(attachment);
    if (params.stepId) {
      var _this$_reporter$onStd4, _this$_reporter8;
      const step = data.steps.get(params.stepId);
      if (step) step.attachments.push(attachment);else (_this$_reporter$onStd4 = (_this$_reporter8 = this._reporter).onStdErr) === null || _this$_reporter$onStd4 === void 0 || _this$_reporter$onStd4.call(_this$_reporter8, 'Internal error: step id not found: ' + params.stepId);
    }
  }
  _failTestWithErrors(test, errors) {
    const runData = this._dataByTestId.get(test.id);
    // There might be a single test that has started but has not finished yet.
    let result;
    if (runData) {
      result = runData.result;
    } else {
      var _this$_reporter$onTes2, _this$_reporter9;
      result = test._appendTestResult();
      (_this$_reporter$onTes2 = (_this$_reporter9 = this._reporter).onTestBegin) === null || _this$_reporter$onTes2 === void 0 || _this$_reporter$onTes2.call(_this$_reporter9, test, result);
    }
    result.errors = [...errors];
    result.error = result.errors[0];
    result.status = errors.length ? 'failed' : 'skipped';
    this._reportTestEnd(test, result);
    this._failedTests.add(test);
  }
  _massSkipTestsFromRemaining(testIds, errors) {
    for (const test of this._remainingByTestId.values()) {
      if (!testIds.has(test.id)) continue;
      if (!this._failureTracker.hasReachedMaxFailures()) {
        this._failTestWithErrors(test, errors);
        errors = []; // Only report errors for the first test.
      }
      this._remainingByTestId.delete(test.id);
    }
    if (errors.length) {
      // We had fatal errors after all tests have passed - most likely in some teardown.
      // Let's just fail the test run.
      this._failureTracker.onWorkerError();
      for (const error of errors) {
        var _this$_reporter$onErr2, _this$_reporter10;
        (_this$_reporter$onErr2 = (_this$_reporter10 = this._reporter).onError) === null || _this$_reporter$onErr2 === void 0 || _this$_reporter$onErr2.call(_this$_reporter10, error);
      }
    }
  }
  _onDone(params) {
    // We won't file remaining if:
    // - there are no remaining
    // - we are here not because something failed
    // - no unrecoverable worker error
    if (!this._remainingByTestId.size && !this._failedTests.size && !params.fatalErrors.length && !params.skipTestsDueToSetupFailure.length && !params.fatalUnknownTestIds && !params.unexpectedExitError) {
      this._finished({
        didFail: false
      });
      return;
    }
    for (const testId of params.fatalUnknownTestIds || []) {
      const test = this._remainingByTestId.get(testId);
      if (test) {
        this._remainingByTestId.delete(testId);
        this._failTestWithErrors(test, [{
          message: `Test not found in the worker process. Make sure test title does not change.`
        }]);
      }
    }
    if (params.fatalErrors.length) {
      // In case of fatal errors, report first remaining test as failing with these errors,
      // and all others as skipped.
      this._massSkipTestsFromRemaining(new Set(this._remainingByTestId.keys()), params.fatalErrors);
    }
    // Handle tests that should be skipped because of the setup failure.
    this._massSkipTestsFromRemaining(new Set(params.skipTestsDueToSetupFailure), []);
    if (params.unexpectedExitError) {
      // When worker exits during a test, we blame the test itself.
      //
      // The most common situation when worker exits while not running a test is:
      // worker failed to require the test file (at the start) because of an exception in one of imports.
      // In this case, "skip" all remaining tests, to avoid running into the same exception over and over.
      if (this._currentlyRunning) this._massSkipTestsFromRemaining(new Set([this._currentlyRunning.test.id]), [params.unexpectedExitError]);else this._massSkipTestsFromRemaining(new Set(this._remainingByTestId.keys()), [params.unexpectedExitError]);
    }
    const retryCandidates = new Set();
    const serialSuitesWithFailures = new Set();
    for (const failedTest of this._failedTests) {
      if (this._failedWithNonRetriableError.has(failedTest)) continue;
      retryCandidates.add(failedTest);
      let outermostSerialSuite;
      for (let parent = failedTest.parent; parent; parent = parent.parent) {
        if (parent._parallelMode === 'serial') outermostSerialSuite = parent;
      }
      if (outermostSerialSuite && !this._failedWithNonRetriableError.has(outermostSerialSuite)) serialSuitesWithFailures.add(outermostSerialSuite);
    }

    // If we have failed tests that belong to a serial suite,
    // we should skip all future tests from the same serial suite.
    const testsBelongingToSomeSerialSuiteWithFailures = [...this._remainingByTestId.values()].filter(test => {
      let parent = test.parent;
      while (parent && !serialSuitesWithFailures.has(parent)) parent = parent.parent;
      return !!parent;
    });
    this._massSkipTestsFromRemaining(new Set(testsBelongingToSomeSerialSuiteWithFailures.map(test => test.id)), []);
    for (const serialSuite of serialSuitesWithFailures) {
      // Add all tests from failed serial suites for possible retry.
      // These will only be retried together, because they have the same
      // "retries" setting and the same number of previous runs.
      serialSuite.allTests().forEach(test => retryCandidates.add(test));
    }
    const remaining = [...this._remainingByTestId.values()];
    for (const test of retryCandidates) {
      if (test.results.length < test.retries + 1) remaining.push(test);
    }

    // This job is over, we will schedule another one.
    const newJob = remaining.length ? {
      ...this._job,
      tests: remaining
    } : undefined;
    this._finished({
      didFail: true,
      newJob
    });
  }
  onExit(data) {
    const unexpectedExitError = data.unexpectedly ? {
      message: `Error: worker process exited unexpectedly (code=${data.code}, signal=${data.signal})`
    } : undefined;
    this._onDone({
      skipTestsDueToSetupFailure: [],
      fatalErrors: [],
      unexpectedExitError
    });
  }
  _finished(result) {
    _utils.eventsHelper.removeEventListeners(this._listeners);
    this.jobResult.resolve(result);
  }
  runInWorker(worker) {
    this._parallelIndex = worker.parallelIndex;
    this._workerIndex = worker.workerIndex;
    const runPayload = {
      file: this._job.requireFile,
      entries: this._job.tests.map(test => {
        return {
          testId: test.id,
          retry: test.results.length
        };
      })
    };
    worker.runTestGroup(runPayload);
    this._listeners = [_utils.eventsHelper.addEventListener(worker, 'testBegin', this._onTestBegin.bind(this)), _utils.eventsHelper.addEventListener(worker, 'testEnd', this._onTestEnd.bind(this)), _utils.eventsHelper.addEventListener(worker, 'stepBegin', this._onStepBegin.bind(this)), _utils.eventsHelper.addEventListener(worker, 'stepEnd', this._onStepEnd.bind(this)), _utils.eventsHelper.addEventListener(worker, 'attach', this._onAttach.bind(this)), _utils.eventsHelper.addEventListener(worker, 'done', this._onDone.bind(this)), _utils.eventsHelper.addEventListener(worker, 'exit', this.onExit.bind(this))];
  }
  skipWholeJob() {
    // If all the tests in a group are skipped, we report them immediately
    // without sending anything to a worker. This avoids creating unnecessary worker processes.
    //
    // However, if there is at least one non-skipped test in a group, we'll send
    // the whole group to the worker process and report tests in the natural order,
    // with skipped tests mixed in-between non-skipped. This makes
    // for a better reporter experience.
    const allTestsSkipped = this._job.tests.every(test => test.expectedStatus === 'skipped');
    if (allTestsSkipped && !this._failureTracker.hasReachedMaxFailures()) {
      for (const test of this._job.tests) {
        var _this$_reporter$onTes3, _this$_reporter11;
        const result = test._appendTestResult();
        (_this$_reporter$onTes3 = (_this$_reporter11 = this._reporter).onTestBegin) === null || _this$_reporter$onTes3 === void 0 || _this$_reporter$onTes3.call(_this$_reporter11, test, result);
        result.status = 'skipped';
        this._reportTestEnd(test, result);
      }
      return true;
    }
    return false;
  }
  currentlyRunning() {
    return this._currentlyRunning;
  }
  _reportTestEnd(test, result) {
    var _this$_reporter$onTes4, _this$_reporter12;
    (_this$_reporter$onTes4 = (_this$_reporter12 = this._reporter).onTestEnd) === null || _this$_reporter$onTes4 === void 0 || _this$_reporter$onTes4.call(_this$_reporter12, test, result);
    const hadMaxFailures = this._failureTracker.hasReachedMaxFailures();
    this._failureTracker.onTestEnd(test, result);
    if (this._failureTracker.hasReachedMaxFailures()) {
      var _this$_reporter$onErr3, _this$_reporter13;
      this._stopCallback();
      if (!hadMaxFailures) (_this$_reporter$onErr3 = (_this$_reporter13 = this._reporter).onError) === null || _this$_reporter$onErr3 === void 0 || _this$_reporter$onErr3.call(_this$_reporter13, {
        message: _utils.colors.red(`Testing stopped early after ${this._failureTracker.maxFailures()} maximum allowed failures.`)
      });
    }
  }
}
function chunkFromParams(params) {
  if (typeof params.text === 'string') return params.text;
  return Buffer.from(params.buffer, 'base64');
}
61 node_modules/playwright/lib/runner/failureTracker.js generated vendored Normal file
@@ -0,0 +1,61 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.FailureTracker = void 0;
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class FailureTracker {
  constructor(_config) {
    this._failureCount = 0;
    this._hasWorkerErrors = false;
    this._rootSuite = void 0;
    this._config = _config;
  }
  onRootSuite(rootSuite) {
    this._rootSuite = rootSuite;
  }
  onTestEnd(test, result) {
    // Test is considered failing after the last retry.
    if (test.outcome() === 'unexpected' && test.results.length > test.retries) ++this._failureCount;
  }
  onWorkerError() {
    this._hasWorkerErrors = true;
  }
  hasReachedMaxFailures() {
    return this.maxFailures() > 0 && this._failureCount >= this.maxFailures();
  }
  hasWorkerErrors() {
    return this._hasWorkerErrors;
  }
  result() {
    return this._hasWorkerErrors || this.hasReachedMaxFailures() || this.hasFailedTests() || this._config.cliFailOnFlakyTests && this.hasFlakyTests() ? 'failed' : 'passed';
  }
  hasFailedTests() {
    var _this$_rootSuite;
    return (_this$_rootSuite = this._rootSuite) === null || _this$_rootSuite === void 0 ? void 0 : _this$_rootSuite.allTests().some(test => !test.ok());
  }
  hasFlakyTests() {
    var _this$_rootSuite2;
    return (_this$_rootSuite2 = this._rootSuite) === null || _this$_rootSuite2 === void 0 ? void 0 : _this$_rootSuite2.allTests().some(test => test.outcome() === 'flaky');
  }
  maxFailures() {
    return this._config.config.maxFailures;
  }
}
exports.FailureTracker = FailureTracker;
66 node_modules/playwright/lib/runner/lastRun.js generated vendored Normal file
@@ -0,0 +1,66 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.LastRunReporter = void 0;
var _fs = _interopRequireDefault(require("fs"));
var _path = _interopRequireDefault(require("path"));
var _projectUtils = require("./projectUtils");
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class LastRunReporter {
  constructor(config) {
    this._config = void 0;
    this._lastRunFile = void 0;
    this._suite = void 0;
    this._config = config;
    const [project] = (0, _projectUtils.filterProjects)(config.projects, config.cliProjectFilter);
    if (project) this._lastRunFile = _path.default.join(project.project.outputDir, '.last-run.json');
  }
  async filterLastFailed() {
    if (!this._lastRunFile) return;
    try {
      const lastRunInfo = JSON.parse(await _fs.default.promises.readFile(this._lastRunFile, 'utf8'));
      this._config.lastFailedTestIdMatcher = id => lastRunInfo.failedTests.includes(id);
    } catch {}
  }
  version() {
    return 'v2';
  }
  printsToStdio() {
    return false;
  }
  onBegin(suite) {
    this._suite = suite;
  }
  async onEnd(result) {
    var _this$_suite;
    if (!this._lastRunFile || this._config.cliListOnly) return;
    await _fs.default.promises.mkdir(_path.default.dirname(this._lastRunFile), {
      recursive: true
    });
    const failedTests = (_this$_suite = this._suite) === null || _this$_suite === void 0 ? void 0 : _this$_suite.allTests().filter(t => !t.ok()).map(t => t.id);
    const lastRunReport = JSON.stringify({
      status: result.status,
      failedTests
    }, undefined, 2);
    await _fs.default.promises.writeFile(this._lastRunFile, lastRunReport);
  }
}
exports.LastRunReporter = LastRunReporter;
315 node_modules/playwright/lib/runner/loadUtils.js generated vendored Normal file
@@ -0,0 +1,315 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.collectProjectsAndTestFiles = collectProjectsAndTestFiles;
exports.createRootSuite = createRootSuite;
exports.loadFileSuites = loadFileSuites;
exports.loadGlobalHook = loadGlobalHook;
exports.loadReporter = loadReporter;
var _path = _interopRequireDefault(require("path"));
var _loaderHost = require("./loaderHost");
var _util = require("../util");
var _projectUtils = require("./projectUtils");
var _testGroups = require("./testGroups");
var _suiteUtils = require("../common/suiteUtils");
var _test = require("../common/test");
var _compilationCache = require("../transform/compilationCache");
var _transform = require("../transform/transform");
var _utilsBundle = require("../utilsBundle");
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

async function collectProjectsAndTestFiles(testRun, doNotRunTestsOutsideProjectFilter) {
  const config = testRun.config;
  const fsCache = new Map();
  const sourceMapCache = new Map();
  const cliFileMatcher = config.cliArgs.length ? (0, _util.createFileMatcherFromArguments)(config.cliArgs) : null;

  // First collect all files for the projects in the command line, don't apply any file filters.
  const allFilesForProject = new Map();
  const filteredProjects = (0, _projectUtils.filterProjects)(config.projects, config.cliProjectFilter);
  for (const project of filteredProjects) {
    const files = await (0, _projectUtils.collectFilesForProject)(project, fsCache);
    allFilesForProject.set(project, files);
  }

  // Filter files based on the file filters, eliminate the empty projects.
  const filesToRunByProject = new Map();
  for (const [project, files] of allFilesForProject) {
    const matchedFiles = files.filter(file => {
      const hasMatchingSources = sourceMapSources(file, sourceMapCache).some(source => {
        if (cliFileMatcher && !cliFileMatcher(source)) return false;
        return true;
      });
      return hasMatchingSources;
    });
    const filteredFiles = matchedFiles.filter(Boolean);
    filesToRunByProject.set(project, filteredFiles);
  }

  // (Re-)add all files for dependent projects, disregard filters.
  const projectClosure = (0, _projectUtils.buildProjectsClosure)([...filesToRunByProject.keys()]);
  for (const [project, type] of projectClosure) {
    if (type === 'dependency') {
      const treatProjectAsEmpty = doNotRunTestsOutsideProjectFilter && !filteredProjects.includes(project);
      const files = treatProjectAsEmpty ? [] : allFilesForProject.get(project) || (await (0, _projectUtils.collectFilesForProject)(project, fsCache));
      filesToRunByProject.set(project, files);
    }
  }
  testRun.projectFiles = filesToRunByProject;
  testRun.projectSuites = new Map();
}
async function loadFileSuites(testRun, mode, errors) {
  // Determine all files to load.
  const config = testRun.config;
  const allTestFiles = new Set();
  for (const files of testRun.projectFiles.values()) files.forEach(file => allTestFiles.add(file));

  // Load test files.
  const fileSuiteByFile = new Map();
  const loaderHost = mode === 'out-of-process' ? new _loaderHost.OutOfProcessLoaderHost(config) : new _loaderHost.InProcessLoaderHost(config);
  if (await loaderHost.start(errors)) {
    for (const file of allTestFiles) {
      const fileSuite = await loaderHost.loadTestFile(file, errors);
      fileSuiteByFile.set(file, fileSuite);
      errors.push(...createDuplicateTitlesErrors(config, fileSuite));
    }
    await loaderHost.stop();
  }

  // Check that no test file imports another test file.
  // Loader must be stopped first, since it populates the dependency tree.
  for (const file of allTestFiles) {
    for (const dependency of (0, _compilationCache.dependenciesForTestFile)(file)) {
      if (allTestFiles.has(dependency)) {
        const importer = _path.default.relative(config.config.rootDir, file);
        const importee = _path.default.relative(config.config.rootDir, dependency);
        errors.push({
          message: `Error: test file "${importer}" should not import test file "${importee}"`,
          location: {
            file,
            line: 1,
            column: 1
          }
        });
      }
    }
  }

  // Collect file suites for each project.
  for (const [project, files] of testRun.projectFiles) {
    const suites = files.map(file => fileSuiteByFile.get(file)).filter(Boolean);
    testRun.projectSuites.set(project, suites);
  }
}
async function createRootSuite(testRun, errors, shouldFilterOnly, additionalFileMatcher) {
  const config = testRun.config;
  // Create root suite, where each child will be a project suite with cloned file suites inside it.
  const rootSuite = new _test.Suite('', 'root');
  const projectSuites = new Map();
  const filteredProjectSuites = new Map();

  // Filter all the projects using grep, testId, file names.
  {
    // Interpret cli parameters.
    const cliFileFilters = (0, _util.createFileFiltersFromArguments)(config.cliArgs);
    const grepMatcher = config.cliGrep ? (0, _util.createTitleMatcher)((0, _util.forceRegExp)(config.cliGrep)) : () => true;
    const grepInvertMatcher = config.cliGrepInvert ? (0, _util.createTitleMatcher)((0, _util.forceRegExp)(config.cliGrepInvert)) : () => false;
    const cliTitleMatcher = title => !grepInvertMatcher(title) && grepMatcher(title);

    // Filter file suites for all projects.
    for (const [project, fileSuites] of testRun.projectSuites) {
      const projectSuite = createProjectSuite(project, fileSuites);
      projectSuites.set(project, projectSuite);
      const filteredProjectSuite = filterProjectSuite(projectSuite, {
        cliFileFilters,
        cliTitleMatcher,
        testIdMatcher: config.testIdMatcher,
        additionalFileMatcher
      });
      filteredProjectSuites.set(project, filteredProjectSuite);
    }
  }
  if (shouldFilterOnly) {
    // Create a fake root to execute the exclusive semantics across the projects.
    const filteredRoot = new _test.Suite('', 'root');
    for (const filteredProjectSuite of filteredProjectSuites.values()) filteredRoot._addSuite(filteredProjectSuite);
    (0, _suiteUtils.filterOnly)(filteredRoot);
    for (const [project, filteredProjectSuite] of filteredProjectSuites) {
      if (!filteredRoot.suites.includes(filteredProjectSuite)) filteredProjectSuites.delete(project);
    }
  }

  // Add post-filtered top-level projects to the root suite for sharding and 'only' processing.
  const projectClosure = (0, _projectUtils.buildProjectsClosure)([...filteredProjectSuites.keys()], project => filteredProjectSuites.get(project)._hasTests());
  for (const [project, type] of projectClosure) {
    if (type === 'top-level') {
      var _project$fullConfig$c;
      project.project.repeatEach = (_project$fullConfig$c = project.fullConfig.configCLIOverrides.repeatEach) !== null && _project$fullConfig$c !== void 0 ? _project$fullConfig$c : project.project.repeatEach;
      rootSuite._addSuite(buildProjectSuite(project, filteredProjectSuites.get(project)));
    }
  }

  // Complain about only.
  if (config.config.forbidOnly) {
    const onlyTestsAndSuites = rootSuite._getOnlyItems();
    if (onlyTestsAndSuites.length > 0) {
      const configFilePath = config.config.configFile ? _path.default.relative(config.config.rootDir, config.config.configFile) : undefined;
      errors.push(...createForbidOnlyErrors(onlyTestsAndSuites, config.configCLIOverrides.forbidOnly, configFilePath));
    }
  }

  // Shard only the top-level projects.
  if (config.config.shard) {
    // Create test groups for top-level projects.
    const testGroups = [];
    for (const projectSuite of rootSuite.suites) {
      // Split beforeAll-grouped tests into "config.shard.total" groups when needed.
      // Later on, we'll re-split them between workers by using "config.workers" instead.
      testGroups.push(...(0, _testGroups.createTestGroups)(projectSuite, config.config.shard.total));
    }

    // Shard test groups.
    const testGroupsInThisShard = (0, _testGroups.filterForShard)(config.config.shard, testGroups);
    const testsInThisShard = new Set();
    for (const group of testGroupsInThisShard) {
      for (const test of group.tests) testsInThisShard.add(test);
    }

    // Update project suites, removing empty ones.
    (0, _suiteUtils.filterTestsRemoveEmptySuites)(rootSuite, test => testsInThisShard.has(test));
  }

  // Explicitly apply --last-failed filter after sharding.
  if (config.lastFailedTestIdMatcher) (0, _suiteUtils.filterByTestIds)(rootSuite, config.lastFailedTestIdMatcher);

  // Now prepend dependency projects without filtration.
  {
    // Filtering 'only' and sharding might have reduced the number of top-level projects.
    // Build the project closure to only include dependencies that are still needed.
    const projectClosure = new Map((0, _projectUtils.buildProjectsClosure)(rootSuite.suites.map(suite => suite._fullProject)));

    // Clone file suites for dependency projects.
    for (const [project, level] of projectClosure.entries()) {
      if (level === 'dependency') rootSuite._prependSuite(buildProjectSuite(project, projectSuites.get(project)));
    }
  }
  return rootSuite;
}
function createProjectSuite(project, fileSuites) {
  const projectSuite = new _test.Suite(project.project.name, 'project');
  for (const fileSuite of fileSuites) projectSuite._addSuite((0, _suiteUtils.bindFileSuiteToProject)(project, fileSuite));
  const grepMatcher = (0, _util.createTitleMatcher)(project.project.grep);
  const grepInvertMatcher = project.project.grepInvert ? (0, _util.createTitleMatcher)(project.project.grepInvert) : null;
  (0, _suiteUtils.filterTestsRemoveEmptySuites)(projectSuite, test => {
    const grepTitle = test._grepTitle();
    if (grepInvertMatcher !== null && grepInvertMatcher !== void 0 && grepInvertMatcher(grepTitle)) return false;
    return grepMatcher(grepTitle);
  });
  return projectSuite;
}
function filterProjectSuite(projectSuite, options) {
  // Fast path.
  if (!options.cliFileFilters.length && !options.cliTitleMatcher && !options.testIdMatcher && !options.additionalFileMatcher) return projectSuite;
  const result = projectSuite._deepClone();
  if (options.cliFileFilters.length) (0, _suiteUtils.filterByFocusedLine)(result, options.cliFileFilters);
  if (options.testIdMatcher) (0, _suiteUtils.filterByTestIds)(result, options.testIdMatcher);
  (0, _suiteUtils.filterTestsRemoveEmptySuites)(result, test => {
    if (options.cliTitleMatcher && !options.cliTitleMatcher(test._grepTitle())) return false;
    if (options.additionalFileMatcher && !options.additionalFileMatcher(test.location.file)) return false;
    return true;
  });
  return result;
}
function buildProjectSuite(project, projectSuite) {
  const result = new _test.Suite(project.project.name, 'project');
  result._fullProject = project;
  if (project.fullyParallel) result._parallelMode = 'parallel';
  for (const fileSuite of projectSuite.suites) {
    // Fast path for the repeatEach = 0.
    result._addSuite(fileSuite);
    for (let repeatEachIndex = 1; repeatEachIndex < project.project.repeatEach; repeatEachIndex++) {
      const clone = fileSuite._deepClone();
      (0, _suiteUtils.applyRepeatEachIndex)(project, clone, repeatEachIndex);
      result._addSuite(clone);
    }
  }
  return result;
}
function createForbidOnlyErrors(onlyTestsAndSuites, forbidOnlyCLIFlag, configFilePath) {
  const errors = [];
  for (const testOrSuite of onlyTestsAndSuites) {
    // Skip root and file.
    const title = testOrSuite.titlePath().slice(2).join(' ');
    const configFilePathName = configFilePath ? `'${configFilePath}'` : 'the Playwright configuration file';
    const forbidOnlySource = forbidOnlyCLIFlag ? `'--forbid-only' CLI flag` : `'forbidOnly' option in ${configFilePathName}`;
    const error = {
      message: `Error: item focused with '.only' is not allowed due to the ${forbidOnlySource}: "${title}"`,
      location: testOrSuite.location
    };
    errors.push(error);
  }
  return errors;
}
function createDuplicateTitlesErrors(config, fileSuite) {
  const errors = [];
  const testsByFullTitle = new Map();
  for (const test of fileSuite.allTests()) {
    const fullTitle = test.titlePath().slice(1).join(' › ');
    const existingTest = testsByFullTitle.get(fullTitle);
    if (existingTest) {
      const error = {
        message: `Error: duplicate test title "${fullTitle}", first declared in ${buildItemLocation(config.config.rootDir, existingTest)}`,
        location: test.location
      };
      errors.push(error);
    }
    testsByFullTitle.set(fullTitle, test);
  }
  return errors;
}
function buildItemLocation(rootDir, testOrSuite) {
  if (!testOrSuite.location) return '';
  return `${_path.default.relative(rootDir, testOrSuite.location.file)}:${testOrSuite.location.line}`;
}
async function requireOrImportDefaultFunction(file, expectConstructor) {
  let func = await (0, _transform.requireOrImport)(file);
  if (func && typeof func === 'object' && 'default' in func) func = func['default'];
  if (typeof func !== 'function') throw (0, _util.errorWithFile)(file, `file must export a single ${expectConstructor ? 'class' : 'function'}.`);
  return func;
}
function loadGlobalHook(config, file) {
  return requireOrImportDefaultFunction(_path.default.resolve(config.config.rootDir, file), false);
}
function loadReporter(config, file) {
  return requireOrImportDefaultFunction(config ? _path.default.resolve(config.config.rootDir, file) : file, true);
}
function sourceMapSources(file, cache) {
  let sources = [file];
  if (!file.endsWith('.js')) return sources;
  if (cache.has(file)) return cache.get(file);
  try {
    const sourceMap = _utilsBundle.sourceMapSupport.retrieveSourceMap(file);
    const sourceMapData = typeof (sourceMap === null || sourceMap === void 0 ? void 0 : sourceMap.map) === 'string' ? JSON.parse(sourceMap.map) : sourceMap === null || sourceMap === void 0 ? void 0 : sourceMap.map;
    if (sourceMapData !== null && sourceMapData !== void 0 && sourceMapData.sources) sources = sourceMapData.sources.map(source => _path.default.resolve(_path.default.dirname(file), source));
  } finally {
    cache.set(file, sources);
    return sources;
  }
}
85 node_modules/playwright/lib/runner/loaderHost.js generated vendored Normal file
@@ -0,0 +1,85 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.OutOfProcessLoaderHost = exports.InProcessLoaderHost = void 0;
var _processHost = require("./processHost");
var _esmLoaderHost = require("../common/esmLoaderHost");
var _ipc = require("../common/ipc");
var _poolBuilder = require("../common/poolBuilder");
var _test = require("../common/test");
var _testLoader = require("../common/testLoader");
var _compilationCache = require("../transform/compilationCache");
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class InProcessLoaderHost {
  constructor(config) {
    this._config = void 0;
    this._poolBuilder = void 0;
    this._config = config;
    this._poolBuilder = _poolBuilder.PoolBuilder.createForLoader();
  }
  async start(errors) {
    return true;
  }
  async loadTestFile(file, testErrors) {
    const result = await (0, _testLoader.loadTestFile)(file, this._config.config.rootDir, testErrors);
    this._poolBuilder.buildPools(result, testErrors);
    return result;
  }
  async stop() {
    await (0, _esmLoaderHost.incorporateCompilationCache)();
  }
}
exports.InProcessLoaderHost = InProcessLoaderHost;
class OutOfProcessLoaderHost {
  constructor(config) {
    this._config = void 0;
    this._processHost = void 0;
    this._config = config;
    this._processHost = new _processHost.ProcessHost(require.resolve('../loader/loaderMain.js'), 'loader', {});
  }
  async start(errors) {
    const startError = await this._processHost.startRunner((0, _ipc.serializeConfig)(this._config, false));
    if (startError) {
      errors.push({
        message: `Test loader process failed to start with code "${startError.code}" and signal "${startError.signal}"`
      });
      return false;
    }
    return true;
  }
  async loadTestFile(file, testErrors) {
    const result = await this._processHost.sendMessage({
      method: 'loadTestFile',
      params: {
        file
      }
    });
    testErrors.push(...result.testErrors);
    return _test.Suite._deepParse(result.fileSuite);
  }
  async stop() {
    const result = await this._processHost.sendMessage({
      method: 'getCompilationCacheFromLoader'
    });
    (0, _compilationCache.addToCompilationCache)(result);
    await this._processHost.stop();
  }
}
exports.OutOfProcessLoaderHost = OutOfProcessLoaderHost;
175 node_modules/playwright/lib/runner/processHost.js generated vendored Normal file
@@ -0,0 +1,175 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.ProcessHost = void 0;
|
||||
var _child_process = _interopRequireDefault(require("child_process"));
|
||||
var _events = require("events");
|
||||
var _utils = require("playwright-core/lib/utils");
|
||||
var _utilsBundle = require("playwright-core/lib/utilsBundle");
|
||||
var _esmLoaderHost = require("../common/esmLoaderHost");
|
||||
var _esmUtils = require("../transform/esmUtils");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
class ProcessHost extends _events.EventEmitter {
|
||||
constructor(runnerScript, processName, env) {
|
||||
super();
|
||||
this.process = void 0;
|
||||
this._didSendStop = false;
|
||||
this._processDidExit = false;
|
||||
this._didExitAndRanOnExit = false;
|
||||
this._runnerScript = void 0;
|
||||
this._lastMessageId = 0;
|
||||
this._callbacks = new Map();
|
||||
this._processName = void 0;
|
||||
this._producedEnv = {};
|
||||
this._extraEnv = void 0;
|
||||
this._runnerScript = runnerScript;
|
||||
this._processName = processName;
|
||||
this._extraEnv = env;
|
||||
}
|
||||
async startRunner(runnerParams, options = {}) {
|
||||
var _this$process$stdout, _this$process$stderr;
|
||||
(0, _utils.assert)(!this.process, 'Internal error: starting the same process twice');
|
||||
this.process = _child_process.default.fork(require.resolve('../common/process'), {
|
      detached: false,
      env: {
        ...process.env,
        ...this._extraEnv,
        ...(_esmLoaderHost.esmLoaderRegistered ? {
          PW_TS_ESM_LOADER_ON: '1'
        } : {})
      },
      stdio: ['ignore', options.onStdOut ? 'pipe' : 'inherit', options.onStdErr && !process.env.PW_RUNNER_DEBUG ? 'pipe' : 'inherit', 'ipc'],
      ...(process.env.PW_TS_ESM_LEGACY_LOADER_ON ? {
        execArgv: (0, _esmUtils.execArgvWithExperimentalLoaderOptions)()
      } : {})
    });
    this.process.on('exit', async (code, signal) => {
      this._processDidExit = true;
      await this.onExit();
      this._didExitAndRanOnExit = true;
      this.emit('exit', {
        unexpectedly: !this._didSendStop,
        code,
        signal
      });
    });
    this.process.on('error', e => {}); // do not yell at a send to dead process.
    this.process.on('message', message => {
      if (_utilsBundle.debug.enabled('pw:test:protocol')) (0, _utilsBundle.debug)('pw:test:protocol')('◀ RECV ' + JSON.stringify(message));
      if (message.method === '__env_produced__') {
        const producedEnv = message.params;
        this._producedEnv = Object.fromEntries(producedEnv.map(e => {
          var _e$;
          return [e[0], (_e$ = e[1]) !== null && _e$ !== void 0 ? _e$ : undefined];
        }));
      } else if (message.method === '__dispatch__') {
        const {
          id,
          error,
          method,
          params,
          result
        } = message.params;
        if (id && this._callbacks.has(id)) {
          const {
            resolve,
            reject
          } = this._callbacks.get(id);
          this._callbacks.delete(id);
          if (error) {
            const errorObject = new Error(error.message);
            errorObject.stack = error.stack;
            reject(errorObject);
          } else {
            resolve(result);
          }
        } else {
          this.emit(method, params);
        }
      } else {
        this.emit(message.method, message.params);
      }
    });
    if (options.onStdOut) (_this$process$stdout = this.process.stdout) === null || _this$process$stdout === void 0 || _this$process$stdout.on('data', options.onStdOut);
    if (options.onStdErr) (_this$process$stderr = this.process.stderr) === null || _this$process$stderr === void 0 || _this$process$stderr.on('data', options.onStdErr);
    const error = await new Promise(resolve => {
      this.process.once('exit', (code, signal) => resolve({
        unexpectedly: true,
        code,
        signal
      }));
      this.once('ready', () => resolve(undefined));
    });
    if (error) return error;
    const processParams = {
      processName: this._processName
    };
    this.send({
      method: '__init__',
      params: {
        processParams,
        runnerScript: this._runnerScript,
        runnerParams
      }
    });
  }
  sendMessage(message) {
    const id = ++this._lastMessageId;
    this.send({
      method: '__dispatch__',
      params: {
        id,
        ...message
      }
    });
    return new Promise((resolve, reject) => {
      this._callbacks.set(id, {
        resolve,
        reject
      });
    });
  }
  sendMessageNoReply(message) {
    this.sendMessage(message).catch(() => {});
  }
  async onExit() {}
  async stop() {
    if (!this._processDidExit && !this._didSendStop) {
      this.send({
        method: '__stop__'
      });
      this._didSendStop = true;
    }
    if (!this._didExitAndRanOnExit) await new Promise(f => this.once('exit', f));
  }
  didSendStop() {
    return this._didSendStop;
  }
  producedEnv() {
    return this._producedEnv;
  }
  send(message) {
    var _this$process;
    if (_utilsBundle.debug.enabled('pw:test:protocol')) (0, _utilsBundle.debug)('pw:test:protocol')('SEND ► ' + JSON.stringify(message));
    (_this$process = this.process) === null || _this$process === void 0 || _this$process.send(message);
  }
}
exports.ProcessHost = ProcessHost;
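// --- Usage sketch (editorial, not part of the vendored file) ---
// ProcessHost implements a small request/response protocol over Node's IPC
// channel: sendMessage() assigns an incrementing id, and the 'message' handler
// above resolves or rejects the matching callback when a '__dispatch__' reply
// arrives. A minimal standalone version of that id-correlation pattern, with a
// hypothetical './child.js' worker entry point, could look like this:
const { fork } = require('child_process');

const child = fork('./child.js'); // hypothetical worker script
const callbacks = new Map();
let lastMessageId = 0;

function sendMessage(method, params) {
  const id = ++lastMessageId;
  child.send({ method: '__dispatch__', params: { id, method, params } });
  return new Promise((resolve, reject) => callbacks.set(id, { resolve, reject }));
}

child.on('message', message => {
  if (message.method !== '__dispatch__') return;
  const { id, error, result } = message.params;
  const callback = callbacks.get(id);
  if (!callback) return; // no pending call: treat it as an event, like emit() above
  callbacks.delete(id);
  if (error) callback.reject(new Error(error.message));
  else callback.resolve(result);
});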
203
node_modules/playwright/lib/runner/projectUtils.js
generated
vendored
Normal file
@@ -0,0 +1,203 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.buildDependentProjects = buildDependentProjects;
exports.buildProjectsClosure = buildProjectsClosure;
exports.buildTeardownToSetupsMap = buildTeardownToSetupsMap;
exports.collectFilesForProject = collectFilesForProject;
exports.filterProjects = filterProjects;
var _fs = _interopRequireDefault(require("fs"));
var _path = _interopRequireDefault(require("path"));
var _util = require("util");
var _utils = require("playwright-core/lib/utils");
var _utilsBundle = require("playwright-core/lib/utilsBundle");
var _util2 = require("../util");
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

const readFileAsync = (0, _util.promisify)(_fs.default.readFile);
const readDirAsync = (0, _util.promisify)(_fs.default.readdir);
function wildcardPatternToRegExp(pattern) {
  return new RegExp('^' + pattern.split('*').map(_utils.escapeRegExp).join('.*') + '$', 'ig');
}
function filterProjects(projects, projectNames) {
  if (!projectNames) return [...projects];
  const projectNamesToFind = new Set();
  const unmatchedProjectNames = new Map();
  const patterns = new Set();
  for (const name of projectNames) {
    const lowerCaseName = name.toLocaleLowerCase();
    if (lowerCaseName.includes('*')) {
      patterns.add(wildcardPatternToRegExp(lowerCaseName));
    } else {
      projectNamesToFind.add(lowerCaseName);
      unmatchedProjectNames.set(lowerCaseName, name);
    }
  }
  const result = projects.filter(project => {
    const lowerCaseName = project.project.name.toLocaleLowerCase();
    if (projectNamesToFind.has(lowerCaseName)) {
      unmatchedProjectNames.delete(lowerCaseName);
      return true;
    }
    for (const regex of patterns) {
      regex.lastIndex = 0;
      if (regex.test(lowerCaseName)) return true;
    }
    return false;
  });
  if (unmatchedProjectNames.size) {
    const unknownProjectNames = Array.from(unmatchedProjectNames.values()).map(n => `"${n}"`).join(', ');
    throw new Error(`Project(s) ${unknownProjectNames} not found. Available projects: ${projects.map(p => `"${p.project.name}"`).join(', ')}`);
  }
  if (!result.length) {
    const allProjects = projects.map(p => `"${p.project.name}"`).join(', ');
    throw new Error(`No projects matched. Available projects: ${allProjects}`);
  }
  return result;
}
function buildTeardownToSetupsMap(projects) {
  const result = new Map();
  for (const project of projects) {
    if (project.teardown) {
      const setups = result.get(project.teardown) || [];
      setups.push(project);
      result.set(project.teardown, setups);
    }
  }
  return result;
}
function buildProjectsClosure(projects, hasTests) {
  const result = new Map();
  const visit = (depth, project) => {
    if (depth > 100) {
      const error = new Error('Circular dependency detected between projects.');
      error.stack = '';
      throw error;
    }
    if (depth === 0 && hasTests && !hasTests(project)) return;
    if (result.get(project) !== 'dependency') result.set(project, depth ? 'dependency' : 'top-level');
    for (const dep of project.deps) visit(depth + 1, dep);
    if (project.teardown) visit(depth + 1, project.teardown);
  };
  for (const p of projects) visit(0, p);
  return result;
}
function buildDependentProjects(forProjects, projects) {
  const reverseDeps = new Map(projects.map(p => [p, []]));
  for (const project of projects) {
    for (const dep of project.deps) reverseDeps.get(dep).push(project);
  }
  const result = new Set();
  const visit = (depth, project) => {
    if (depth > 100) {
      const error = new Error('Circular dependency detected between projects.');
      error.stack = '';
      throw error;
    }
    result.add(project);
    for (const reverseDep of reverseDeps.get(project)) visit(depth + 1, reverseDep);
    if (project.teardown) visit(depth + 1, project.teardown);
  };
  for (const forProject of forProjects) visit(0, forProject);
  return result;
}
async function collectFilesForProject(project, fsCache = new Map()) {
  const extensions = new Set(['.js', '.ts', '.mjs', '.mts', '.cjs', '.cts', '.jsx', '.tsx', '.mjsx', '.mtsx', '.cjsx', '.ctsx']);
  const testFileExtension = file => extensions.has(_path.default.extname(file));
  const allFiles = await cachedCollectFiles(project.project.testDir, project.respectGitIgnore, fsCache);
  const testMatch = (0, _util2.createFileMatcher)(project.project.testMatch);
  const testIgnore = (0, _util2.createFileMatcher)(project.project.testIgnore);
  const testFiles = allFiles.filter(file => {
    if (!testFileExtension(file)) return false;
    const isTest = !testIgnore(file) && testMatch(file);
    if (!isTest) return false;
    return true;
  });
  return testFiles;
}
async function cachedCollectFiles(testDir, respectGitIgnore, fsCache) {
  const key = testDir + ':' + respectGitIgnore;
  let result = fsCache.get(key);
  if (!result) {
    result = await collectFiles(testDir, respectGitIgnore);
    fsCache.set(key, result);
  }
  return result;
}
async function collectFiles(testDir, respectGitIgnore) {
  if (!_fs.default.existsSync(testDir)) return [];
  if (!_fs.default.statSync(testDir).isDirectory()) return [];
  const checkIgnores = (entryPath, rules, isDirectory, parentStatus) => {
    let status = parentStatus;
    for (const rule of rules) {
      const ruleIncludes = rule.negate;
      if (status === 'included' === ruleIncludes) continue;
      const relative = _path.default.relative(rule.dir, entryPath);
      if (rule.match('/' + relative) || rule.match(relative)) {
        // Matches "/dir/file" or "dir/file"
        status = ruleIncludes ? 'included' : 'ignored';
      } else if (isDirectory && (rule.match('/' + relative + '/') || rule.match(relative + '/'))) {
        // Matches "/dir/subdir/" or "dir/subdir/" for directories.
        status = ruleIncludes ? 'included' : 'ignored';
      } else if (isDirectory && ruleIncludes && (rule.match('/' + relative, true) || rule.match(relative, true))) {
        // Matches "/dir/donotskip/" when "/dir" is excluded, but "!/dir/donotskip/file" is included.
        status = 'ignored-but-recurse';
      }
    }
    return status;
  };
  const files = [];
  const visit = async (dir, rules, status) => {
    const entries = await readDirAsync(dir, {
      withFileTypes: true
    });
    entries.sort((a, b) => a.name.localeCompare(b.name));
    if (respectGitIgnore) {
      const gitignore = entries.find(e => e.isFile() && e.name === '.gitignore');
      if (gitignore) {
        const content = await readFileAsync(_path.default.join(dir, gitignore.name), 'utf8');
        const newRules = content.split(/\r?\n/).map(s => {
          s = s.trim();
          if (!s) return;
          // Use flipNegate, because we handle negation ourselves.
          const rule = new _utilsBundle.minimatch.Minimatch(s, {
            matchBase: true,
            dot: true,
            flipNegate: true
          });
          if (rule.comment) return;
          rule.dir = dir;
          return rule;
        }).filter(rule => !!rule);
        rules = [...rules, ...newRules];
      }
    }
    for (const entry of entries) {
      if (entry.name === '.' || entry.name === '..') continue;
      if (entry.isFile() && entry.name === '.gitignore') continue;
      if (entry.isDirectory() && entry.name === 'node_modules') continue;
      const entryPath = _path.default.join(dir, entry.name);
      const entryStatus = checkIgnores(entryPath, rules, entry.isDirectory(), status);
      if (entry.isDirectory() && entryStatus !== 'ignored') await visit(entryPath, rules, entryStatus);
      else if (entry.isFile() && entryStatus === 'included') files.push(entryPath);
    }
  };
  await visit(testDir, [], 'included');
  return files;
}
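// --- Usage sketch (editorial, not part of the vendored file) ---
// filterProjects() lower-cases both sides and compiles '*' into '.*', so a CLI
// filter like --project='chrom*' matches a project named "Chromium". The inlined
// escapeRegExp below is an assumed stand-in for the playwright-core helper:
const escapeRegExp = s => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

function wildcardPatternToRegExp(pattern) {
  return new RegExp('^' + pattern.split('*').map(escapeRegExp).join('.*') + '$', 'ig');
}

const regex = wildcardPatternToRegExp('chrom*');
regex.lastIndex = 0; // the 'g' flag makes test() stateful, hence the reset in the loop above
console.log(regex.test('chromium')); // true
regex.lastIndex = 0;
console.log(regex.test('firefox')); // false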
168
node_modules/playwright/lib/runner/rebase.js
generated
vendored
Normal file
@@ -0,0 +1,168 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.addSuggestedRebaseline = addSuggestedRebaseline;
exports.applySuggestedRebaselines = applySuggestedRebaselines;
exports.clearSuggestedRebaselines = clearSuggestedRebaselines;
var _fs = _interopRequireDefault(require("fs"));
var _path = _interopRequireDefault(require("path"));
var _utils = require("playwright-core/lib/utils");
var _utilsBundle = require("playwright-core/lib/utilsBundle");
var _projectUtils = require("./projectUtils");
var _babelBundle = require("../transform/babelBundle");
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
/**
 * Copyright (c) Microsoft Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

const t = _babelBundle.types;
const suggestedRebaselines = new _utils.MultiMap();
function addSuggestedRebaseline(location, suggestedRebaseline) {
  suggestedRebaselines.set(location.file, {
    location,
    code: suggestedRebaseline
  });
}
function clearSuggestedRebaselines() {
  suggestedRebaselines.clear();
}
async function applySuggestedRebaselines(config, reporter) {
  if (config.config.updateSnapshots === 'none') return;
  if (!suggestedRebaselines.size) return;
  const [project] = (0, _projectUtils.filterProjects)(config.projects, config.cliProjectFilter);
  if (!project) return;
  const patches = [];
  const files = [];
  const gitCache = new Map();
  const patchFile = _path.default.join(project.project.outputDir, 'rebaselines.patch');
  for (const fileName of [...suggestedRebaselines.keys()].sort()) {
    const source = await _fs.default.promises.readFile(fileName, 'utf8');
    const lines = source.split('\n');
    const replacements = suggestedRebaselines.get(fileName);
    const fileNode = (0, _babelBundle.babelParse)(source, fileName, true);
    const ranges = [];
    (0, _babelBundle.traverse)(fileNode, {
      CallExpression: path => {
        const node = path.node;
        if (node.arguments.length < 1) return;
        if (!t.isMemberExpression(node.callee)) return;
        const argument = node.arguments[0];
        if (!t.isStringLiteral(argument) && !t.isTemplateLiteral(argument)) return;
        const prop = node.callee.property;
        if (!prop.loc || !argument.start || !argument.end) return;
        // Replacements are anchored by the location of the call expression.
        // However, replacement text is meant to only replace the first argument.
        for (const replacement of replacements) {
          // In Babel, rows are 1-based, columns are 0-based.
          if (prop.loc.start.line !== replacement.location.line) continue;
          if (prop.loc.start.column + 1 !== replacement.location.column) continue;
          const indent = lines[prop.loc.start.line - 1].match(/^\s*/)[0];
          const newText = replacement.code.replace(/\{indent\}/g, indent);
          ranges.push({
            start: argument.start,
            end: argument.end,
            oldText: source.substring(argument.start, argument.end),
            newText
          });
          // We can have multiple, hopefully equal, replacements for the same location,
          // for example when a single test runs multiple times because of projects or retries.
          // Do not apply multiple replacements for the same assertion.
          break;
        }
      }
    });
    ranges.sort((a, b) => b.start - a.start);
    let result = source;
    for (const range of ranges) result = result.substring(0, range.start) + range.newText + result.substring(range.end);
    const relativeName = _path.default.relative(process.cwd(), fileName);
    files.push(relativeName);
    if (config.config.updateSourceMethod === 'overwrite') {
      await _fs.default.promises.writeFile(fileName, result);
    } else if (config.config.updateSourceMethod === '3way') {
      await _fs.default.promises.writeFile(fileName, applyPatchWithConflictMarkers(source, result));
    } else {
      const gitFolder = findGitRoot(_path.default.dirname(fileName), gitCache);
      const relativeToGit = _path.default.relative(gitFolder || process.cwd(), fileName);
      patches.push(createPatch(relativeToGit, source, result));
    }
  }
  const fileList = files.map(file => ' ' + _utils.colors.dim(file)).join('\n');
  reporter.onStdErr(`\nNew baselines created for:\n\n${fileList}\n`);
  if (config.config.updateSourceMethod === 'patch') {
    await _fs.default.promises.mkdir(_path.default.dirname(patchFile), {
      recursive: true
    });
    await _fs.default.promises.writeFile(patchFile, patches.join('\n'));
    reporter.onStdErr(`\n ` + _utils.colors.cyan('git apply ' + _path.default.relative(process.cwd(), patchFile)) + '\n');
  }
}
function createPatch(fileName, before, after) {
  const file = fileName.replace(/\\/g, '/');
  const text = _utilsBundle.diff.createPatch(file, before, after, undefined, undefined, {
    context: 3
  });
  return ['diff --git a/' + file + ' b/' + file, '--- a/' + file, '+++ b/' + file, ...text.split('\n').slice(4)].join('\n');
}
function findGitRoot(dir, cache) {
  const result = cache.get(dir);
  if (result !== undefined) return result;
  const gitPath = _path.default.join(dir, '.git');
  if (_fs.default.existsSync(gitPath) && _fs.default.lstatSync(gitPath).isDirectory()) {
    cache.set(dir, dir);
    return dir;
  }
  const parentDir = _path.default.dirname(dir);
  if (dir === parentDir) {
    cache.set(dir, null);
    return null;
  }
  const parentResult = findGitRoot(parentDir, cache);
  cache.set(dir, parentResult);
  return parentResult;
}
function applyPatchWithConflictMarkers(oldText, newText) {
  const diffResult = _utilsBundle.diff.diffLines(oldText, newText);
  let result = '';
  let conflict = false;
  diffResult.forEach(part => {
    if (part.added) {
      if (conflict) {
        result += part.value;
        result += '>>>>>>> SNAPSHOT\n';
        conflict = false;
      } else {
        result += '<<<<<<< HEAD\n';
        result += part.value;
        result += '=======\n';
        conflict = true;
      }
    } else if (part.removed) {
      result += '<<<<<<< HEAD\n';
      result += part.value;
      result += '=======\n';
      conflict = true;
    } else {
      if (conflict) {
        result += '>>>>>>> SNAPSHOT\n';
        conflict = false;
      }
      result += part.value;
    }
  });
  if (conflict) result += '>>>>>>> SNAPSHOT\n';
  return result;
}
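// --- Usage sketch (editorial, not part of the vendored file) ---
// applyPatchWithConflictMarkers() renders a snapshot update as a git-style
// merge conflict. Given the function above (and assuming jsdiff's usual
// removed-before-added ordering for a changed region), a one-line change
// produces:
const before = 'expect(value).toBe("old");\n';
const after = 'expect(value).toBe("new");\n';
console.log(applyPatchWithConflictMarkers(before, after));
// <<<<<<< HEAD
// expect(value).toBe("old");
// =======
// expect(value).toBe("new");
// >>>>>>> SNAPSHOT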
151
node_modules/playwright/lib/runner/reporters.js
generated
vendored
Normal file
@@ -0,0 +1,151 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.createErrorCollectingReporter = createErrorCollectingReporter;
exports.createReporterForTestServer = createReporterForTestServer;
exports.createReporters = createReporters;
var _path = _interopRequireDefault(require("path"));
var _utils = require("playwright-core/lib/utils");
var _loadUtils = require("./loadUtils");
var _base = require("../reporters/base");
var _blob = require("../reporters/blob");
var _dot = _interopRequireDefault(require("../reporters/dot"));
var _empty = _interopRequireDefault(require("../reporters/empty"));
var _github = _interopRequireDefault(require("../reporters/github"));
var _html = _interopRequireDefault(require("../reporters/html"));
var _json = _interopRequireDefault(require("../reporters/json"));
var _junit = _interopRequireDefault(require("../reporters/junit"));
var _line = _interopRequireDefault(require("../reporters/line"));
var _list = _interopRequireDefault(require("../reporters/list"));
var _reporterV = require("../reporters/reporterV2");
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
/**
 * Copyright Microsoft Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

async function createReporters(config, mode, isTestServer, descriptions) {
  const defaultReporters = {
    blob: _blob.BlobReporter,
    dot: mode === 'list' ? ListModeReporter : _dot.default,
    line: mode === 'list' ? ListModeReporter : _line.default,
    list: mode === 'list' ? ListModeReporter : _list.default,
    github: _github.default,
    json: _json.default,
    junit: _junit.default,
    null: _empty.default,
    html: _html.default
  };
  const reporters = [];
  descriptions !== null && descriptions !== void 0 ? descriptions : descriptions = config.config.reporter;
  if (config.configCLIOverrides.additionalReporters) descriptions = [...descriptions, ...config.configCLIOverrides.additionalReporters];
  const runOptions = reporterOptions(config, mode, isTestServer);
  for (const r of descriptions) {
    const [name, arg] = r;
    const options = {
      ...runOptions,
      ...arg
    };
    if (name in defaultReporters) {
      reporters.push(new defaultReporters[name](options));
    } else {
      const reporterConstructor = await (0, _loadUtils.loadReporter)(config, name);
      reporters.push((0, _reporterV.wrapReporterAsV2)(new reporterConstructor(options)));
    }
  }
  if (process.env.PW_TEST_REPORTER) {
    const reporterConstructor = await (0, _loadUtils.loadReporter)(config, process.env.PW_TEST_REPORTER);
    reporters.push((0, _reporterV.wrapReporterAsV2)(new reporterConstructor(runOptions)));
  }
  const someReporterPrintsToStdio = reporters.some(r => r.printsToStdio ? r.printsToStdio() : true);
  if (reporters.length && !someReporterPrintsToStdio) {
    // Add a line/dot/list-mode reporter for convenience.
    // Important to put it first, just in case some other reporter stalls onEnd.
    if (mode === 'list') reporters.unshift(new ListModeReporter());
    else if (mode !== 'merge') reporters.unshift(!process.env.CI ? new _line.default({
      omitFailures: true
    }) : new _dot.default());
  }
  return reporters;
}
async function createReporterForTestServer(file, messageSink) {
  const reporterConstructor = await (0, _loadUtils.loadReporter)(null, file);
  return (0, _reporterV.wrapReporterAsV2)(new reporterConstructor({
    _send: messageSink
  }));
}
function createErrorCollectingReporter(screen, writeToConsole) {
  const errors = [];
  return {
    version: () => 'v2',
    onError(error) {
      errors.push(error);
      if (writeToConsole) process.stdout.write((0, _base.formatError)(screen, error).message + '\n');
    },
    errors: () => errors
  };
}
function reporterOptions(config, mode, isTestServer) {
  return {
    configDir: config.configDir,
    _mode: mode,
    _isTestServer: isTestServer,
    _commandHash: computeCommandHash(config)
  };
}
function computeCommandHash(config) {
  const parts = [];
  // Include project names for readability.
  if (config.cliProjectFilter) parts.push(...config.cliProjectFilter);
  const command = {};
  if (config.cliArgs.length) command.cliArgs = config.cliArgs;
  if (config.cliGrep) command.cliGrep = config.cliGrep;
  if (config.cliGrepInvert) command.cliGrepInvert = config.cliGrepInvert;
  if (config.cliOnlyChanged) command.cliOnlyChanged = config.cliOnlyChanged;
  if (Object.keys(command).length) parts.push((0, _utils.calculateSha1)(JSON.stringify(command)).substring(0, 7));
  return parts.join('-');
}
class ListModeReporter {
  constructor() {
    this.config = void 0;
  }
  version() {
    return 'v2';
  }
  onConfigure(config) {
    this.config = config;
  }
  onBegin(suite) {
    // eslint-disable-next-line no-console
    console.log(`Listing tests:`);
    const tests = suite.allTests();
    const files = new Set();
    for (const test of tests) {
      // root, project, file, ...describes, test
      const [, projectName,, ...titles] = test.titlePath();
      const location = `${_path.default.relative(this.config.rootDir, test.location.file)}:${test.location.line}:${test.location.column}`;
      const projectTitle = projectName ? `[${projectName}] › ` : '';
      // eslint-disable-next-line no-console
      console.log(` ${projectTitle}${location} › ${titles.join(' › ')}`);
      files.add(test.location.file);
    }
    // eslint-disable-next-line no-console
    console.log(`Total: ${tests.length} ${tests.length === 1 ? 'test' : 'tests'} in ${files.size} ${files.size === 1 ? 'file' : 'files'}`);
  }
  onError(error) {
    // eslint-disable-next-line no-console
    console.error('\n' + (0, _base.formatError)(_base.terminalScreen, error).message);
  }
}
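// --- Usage sketch (editorial, not part of the vendored file) ---
// createReporters() receives reporter descriptions as [name, options] tuples.
// A hypothetical description list maps onto the lookup above like this:
const descriptions = [
  ['list', undefined],                      // built-in: resolved via defaultReporters
  ['json', { outputFile: 'results.json' }], // built-in, options merged over runOptions
  ['./my-reporter.js', { verbose: true }]   // not built-in: loaded via loadReporter
];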
121
node_modules/playwright/lib/runner/runner.js
generated
vendored
Normal file
@@ -0,0 +1,121 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.Runner = void 0;
var _lastRun = require("./lastRun");
var _projectUtils = require("./projectUtils");
var _reporters = require("./reporters");
var _tasks = require("./tasks");
var _gitCommitInfoPlugin = require("../plugins/gitCommitInfoPlugin");
var _webServerPlugin = require("../plugins/webServerPlugin");
var _base = require("../reporters/base");
var _internalReporter = require("../reporters/internalReporter");
var _compilationCache = require("../transform/compilationCache");
/**
 * Copyright 2019 Google Inc. All rights reserved.
 * Modifications copyright (c) Microsoft Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class Runner {
  constructor(config) {
    this._config = void 0;
    this._config = config;
  }
  async listTestFiles(projectNames) {
    const projects = (0, _projectUtils.filterProjects)(this._config.projects, projectNames);
    const report = {
      projects: []
    };
    for (const project of projects) {
      report.projects.push({
        name: project.project.name,
        testDir: project.project.testDir,
        use: {
          testIdAttribute: project.project.use.testIdAttribute
        },
        files: await (0, _projectUtils.collectFilesForProject)(project)
      });
    }
    return report;
  }
  async runAllTests() {
    const config = this._config;
    const listOnly = config.cliListOnly;
    (0, _gitCommitInfoPlugin.addGitCommitInfoPlugin)(config);

    // Legacy webServer support.
    (0, _webServerPlugin.webServerPluginsForConfig)(config).forEach(p => config.plugins.push({
      factory: p
    }));
    const reporters = await (0, _reporters.createReporters)(config, listOnly ? 'list' : 'test', false);
    const lastRun = new _lastRun.LastRunReporter(config);
    if (config.cliLastFailed) await lastRun.filterLastFailed();
    const reporter = new _internalReporter.InternalReporter([...reporters, lastRun]);
    const tasks = listOnly ? [(0, _tasks.createLoadTask)('in-process', {
      failOnLoadErrors: true,
      filterOnly: false
    }), (0, _tasks.createReportBeginTask)()] : [(0, _tasks.createApplyRebaselinesTask)(), ...(0, _tasks.createGlobalSetupTasks)(config), (0, _tasks.createLoadTask)('in-process', {
      filterOnly: true,
      failOnLoadErrors: true
    }), ...(0, _tasks.createRunTestsTasks)(config)];
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(config, reporter), tasks, config.config.globalTimeout);

    // Calling process.exit() might truncate large stdout/stderr output.
    // See https://github.com/nodejs/node/issues/6456.
    // See https://github.com/nodejs/node/issues/12921
    await new Promise(resolve => process.stdout.write('', () => resolve()));
    await new Promise(resolve => process.stderr.write('', () => resolve()));
    return status;
  }
  async findRelatedTestFiles(files) {
    const errorReporter = (0, _reporters.createErrorCollectingReporter)(_base.terminalScreen);
    const reporter = new _internalReporter.InternalReporter([errorReporter]);
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(this._config, reporter), [...(0, _tasks.createPluginSetupTasks)(this._config), (0, _tasks.createLoadTask)('in-process', {
      failOnLoadErrors: true,
      filterOnly: false,
      populateDependencies: true
    })]);
    if (status !== 'passed') return {
      errors: errorReporter.errors(),
      testFiles: []
    };
    return {
      testFiles: (0, _compilationCache.affectedTestFiles)(files)
    };
  }
  async runDevServer() {
    const reporter = new _internalReporter.InternalReporter([(0, _reporters.createErrorCollectingReporter)(_base.terminalScreen, true)]);
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(this._config, reporter), [...(0, _tasks.createPluginSetupTasks)(this._config), (0, _tasks.createLoadTask)('in-process', {
      failOnLoadErrors: true,
      filterOnly: false
    }), (0, _tasks.createStartDevServerTask)(), {
      title: 'wait until interrupted',
      setup: async () => new Promise(() => {})
    }]);
    return {
      status
    };
  }
  async clearCache() {
    const reporter = new _internalReporter.InternalReporter([(0, _reporters.createErrorCollectingReporter)(_base.terminalScreen, true)]);
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(this._config, reporter), [...(0, _tasks.createPluginSetupTasks)(this._config), (0, _tasks.createClearCacheTask)(this._config)]);
    return {
      status
    };
  }
}
exports.Runner = Runner;
100
node_modules/playwright/lib/runner/sigIntWatcher.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.SigIntWatcher = void 0;
var _FixedNodeSIGINTHandler;
/**
 * Copyright (c) Microsoft Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class SigIntWatcher {
  constructor() {
    this._hadSignal = false;
    this._sigintPromise = void 0;
    this._sigintHandler = void 0;
    let sigintCallback;
    this._sigintPromise = new Promise(f => sigintCallback = f);
    this._sigintHandler = () => {
      FixedNodeSIGINTHandler.off(this._sigintHandler);
      this._hadSignal = true;
      sigintCallback();
    };
    FixedNodeSIGINTHandler.on(this._sigintHandler);
  }
  promise() {
    return this._sigintPromise;
  }
  hadSignal() {
    return this._hadSignal;
  }
  disarm() {
    FixedNodeSIGINTHandler.off(this._sigintHandler);
  }
}

// NPM/NPX will send us duplicate SIGINT signals, so we need to ignore them.
exports.SigIntWatcher = SigIntWatcher;
class FixedNodeSIGINTHandler {
  static _install() {
    if (!this._handlerInstalled) {
      this._handlerInstalled = true;
      process.on('SIGINT', this._dispatch);
    }
  }
  static _uninstall() {
    if (this._handlerInstalled) {
      this._handlerInstalled = false;
      process.off('SIGINT', this._dispatch);
    }
  }
  static on(handler) {
    this._handlers.push(handler);
    if (this._handlers.length === 1) this._install();
  }
  static off(handler) {
    this._handlers = this._handlers.filter(h => h !== handler);
    if (!this._ignoreNextSIGINTs && !this._handlers.length) this._uninstall();
  }
}
_FixedNodeSIGINTHandler = FixedNodeSIGINTHandler;
FixedNodeSIGINTHandler._handlers = [];
FixedNodeSIGINTHandler._ignoreNextSIGINTs = false;
FixedNodeSIGINTHandler._handlerInstalled = false;
FixedNodeSIGINTHandler._dispatch = () => {
  if (_FixedNodeSIGINTHandler._ignoreNextSIGINTs) return;
  _FixedNodeSIGINTHandler._ignoreNextSIGINTs = true;
  setTimeout(() => {
    _FixedNodeSIGINTHandler._ignoreNextSIGINTs = false;
    // We remove the handler so that second Ctrl+C immediately kills the process
    // via the default sigint handler. This is handy in the case where our shutdown
    // takes a lot of time or is buggy.
    //
    // When running through NPM we might get multiple SIGINT signals
    // for a single Ctrl+C - this is an NPM bug present since NPM v6+.
    // https://github.com/npm/cli/issues/1591
    // https://github.com/npm/cli/issues/2124
    // https://github.com/npm/cli/issues/5021
    //
    // Therefore, removing the handler too soon will just kill the process
    // with default handler without printing the results.
    // We work around this by giving NPM 1000ms to send us duplicate signals.
    // The side effect is that slow shutdown or bug in our process will force
    // the user to hit Ctrl+C again after at least a second.
    if (!_FixedNodeSIGINTHandler._handlers.length) _FixedNodeSIGINTHandler._uninstall();
  }, 1000);
  for (const handler of _FixedNodeSIGINTHandler._handlers) handler();
};
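// --- Usage sketch (editorial, not part of the vendored file) ---
// The watcher is raced against a long operation, the way TaskRunner uses it;
// 'longRunningWork' is a hypothetical stand-in for a real task:
async function runInterruptibly(longRunningWork) {
  const sigintWatcher = new SigIntWatcher();
  await Promise.race([longRunningWork(), sigintWatcher.promise()]);
  sigintWatcher.disarm(); // always unhook, or the process-level SIGINT handler leaks
  return sigintWatcher.hadSignal() ? 'interrupted' : 'passed';
}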
136
node_modules/playwright/lib/runner/taskRunner.js
generated
vendored
Normal file
@@ -0,0 +1,136 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.TaskRunner = void 0;
var _utils = require("playwright-core/lib/utils");
var _utilsBundle = require("playwright-core/lib/utilsBundle");
var _sigIntWatcher = require("./sigIntWatcher");
var _util = require("../util");
/**
 * Copyright (c) Microsoft Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

class TaskRunner {
  constructor(reporter, globalTimeoutForError) {
    this._tasks = [];
    this._reporter = void 0;
    this._hasErrors = false;
    this._interrupted = false;
    this._isTearDown = false;
    this._globalTimeoutForError = void 0;
    this._reporter = reporter;
    this._globalTimeoutForError = globalTimeoutForError;
  }
  addTask(task) {
    this._tasks.push(task);
  }
  async run(context, deadline, cancelPromise) {
    const {
      status,
      cleanup
    } = await this.runDeferCleanup(context, deadline, cancelPromise);
    const teardownStatus = await cleanup();
    return status === 'passed' ? teardownStatus : status;
  }
  async runDeferCleanup(context, deadline, cancelPromise = new _utils.ManualPromise()) {
    const sigintWatcher = new _sigIntWatcher.SigIntWatcher();
    const timeoutWatcher = new TimeoutWatcher(deadline);
    const teardownRunner = new TaskRunner(this._reporter, this._globalTimeoutForError);
    teardownRunner._isTearDown = true;
    let currentTaskName;
    const taskLoop = async () => {
      for (const task of this._tasks) {
        currentTaskName = task.title;
        if (this._interrupted) break;
        (0, _utilsBundle.debug)('pw:test:task')(`"${task.title}" started`);
        const errors = [];
        const softErrors = [];
        try {
          var _task$setup;
          teardownRunner._tasks.unshift({
            title: `teardown for ${task.title}`,
            setup: task.teardown
          });
          await ((_task$setup = task.setup) === null || _task$setup === void 0 ? void 0 : _task$setup.call(task, context, errors, softErrors));
        } catch (e) {
          (0, _utilsBundle.debug)('pw:test:task')(`error in "${task.title}": `, e);
          errors.push((0, _util.serializeError)(e));
        } finally {
          for (const error of [...softErrors, ...errors]) {
            var _this$_reporter$onErr, _this$_reporter;
            (_this$_reporter$onErr = (_this$_reporter = this._reporter).onError) === null || _this$_reporter$onErr === void 0 || _this$_reporter$onErr.call(_this$_reporter, error);
          }
          if (errors.length) {
            if (!this._isTearDown) this._interrupted = true;
            this._hasErrors = true;
          }
        }
        (0, _utilsBundle.debug)('pw:test:task')(`"${task.title}" finished`);
      }
    };
    await Promise.race([taskLoop(), cancelPromise, sigintWatcher.promise(), timeoutWatcher.promise]);
    sigintWatcher.disarm();
    timeoutWatcher.disarm();

    // Prevent subsequent tasks from running.
    this._interrupted = true;
    let status = 'passed';
    if (sigintWatcher.hadSignal() || cancelPromise !== null && cancelPromise !== void 0 && cancelPromise.isDone()) {
      status = 'interrupted';
    } else if (timeoutWatcher.timedOut()) {
      var _this$_reporter$onErr2, _this$_reporter2;
      (_this$_reporter$onErr2 = (_this$_reporter2 = this._reporter).onError) === null || _this$_reporter$onErr2 === void 0 || _this$_reporter$onErr2.call(_this$_reporter2, {
        message: _utils.colors.red(`Timed out waiting ${this._globalTimeoutForError / 1000}s for the ${currentTaskName} to run`)
      });
      status = 'timedout';
    } else if (this._hasErrors) {
      status = 'failed';
    }
    cancelPromise === null || cancelPromise === void 0 || cancelPromise.resolve();
    // Note that upon hitting deadline, we "run cleanup", but it exits immediately
    // because of the same deadline. Essentially, we're not performing any cleanup.
    const cleanup = () => teardownRunner.runDeferCleanup(context, deadline).then(r => r.status);
    return {
      status,
      cleanup
    };
  }
}
exports.TaskRunner = TaskRunner;
class TimeoutWatcher {
  constructor(deadline) {
    this._timedOut = false;
    this.promise = new _utils.ManualPromise();
    this._timer = void 0;
    if (!deadline) return;
    if (deadline - (0, _utils.monotonicTime)() <= 0) {
      this._timedOut = true;
      this.promise.resolve();
      return;
    }
    this._timer = setTimeout(() => {
      this._timedOut = true;
      this.promise.resolve();
    }, deadline - (0, _utils.monotonicTime)());
  }
  timedOut() {
    return this._timedOut;
  }
  disarm() {
    clearTimeout(this._timer);
  }
}
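// --- Usage sketch (editorial, not part of the vendored file) ---
// Tasks are plain { title, setup?, teardown? } objects; teardowns are unshifted
// onto a second runner, so they execute in reverse registration order. A
// minimal run with a stub reporter (inside an async function; deadline 0
// disables the TimeoutWatcher):
async function demo() {
  const runner = new TaskRunner({ onError: error => console.error(error.message) }, 0);
  runner.addTask({
    title: 'start service',
    setup: async context => { context.service = 'started'; },
    teardown: async context => { context.service = undefined; }
  });
  runner.addTask({
    title: 'use service',
    setup: async context => console.log(`using ${context.service}`)
  });
  return await runner.run({}, 0); // 'passed' unless a setup/teardown reported errors
}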
433
node_modules/playwright/lib/runner/tasks.js
generated
vendored
Normal file
@@ -0,0 +1,433 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.TestRun = void 0;
|
||||
exports.createApplyRebaselinesTask = createApplyRebaselinesTask;
|
||||
exports.createClearCacheTask = createClearCacheTask;
|
||||
exports.createGlobalSetupTasks = createGlobalSetupTasks;
|
||||
exports.createListFilesTask = createListFilesTask;
|
||||
exports.createLoadTask = createLoadTask;
|
||||
exports.createPluginSetupTasks = createPluginSetupTasks;
|
||||
exports.createReportBeginTask = createReportBeginTask;
|
||||
exports.createRunTestsTasks = createRunTestsTasks;
|
||||
exports.createStartDevServerTask = createStartDevServerTask;
|
||||
exports.runTasks = runTasks;
|
||||
exports.runTasksDeferCleanup = runTasksDeferCleanup;
|
||||
var _fs = _interopRequireDefault(require("fs"));
|
||||
var _path = _interopRequireDefault(require("path"));
|
||||
var _util = require("util");
|
||||
var _utils = require("playwright-core/lib/utils");
|
||||
var _utilsBundle = require("playwright-core/lib/utilsBundle");
|
||||
var _dispatcher = require("./dispatcher");
|
||||
var _failureTracker = require("./failureTracker");
|
||||
var _loadUtils = require("./loadUtils");
|
||||
var _projectUtils = require("./projectUtils");
|
||||
var _rebase = require("./rebase");
|
||||
var _taskRunner = require("./taskRunner");
|
||||
var _vcs = require("./vcs");
|
||||
var _test = require("../common/test");
|
||||
var _testGroups = require("../runner/testGroups");
|
||||
var _compilationCache = require("../transform/compilationCache");
|
||||
var _util2 = require("../util");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
const readDirAsync = (0, _util.promisify)(_fs.default.readdir);
|
||||
class TestRun {
|
||||
constructor(config, reporter) {
|
||||
this.config = void 0;
|
||||
this.reporter = void 0;
|
||||
this.failureTracker = void 0;
|
||||
this.rootSuite = undefined;
|
||||
this.phases = [];
|
||||
this.projectFiles = new Map();
|
||||
this.projectSuites = new Map();
|
||||
this.config = config;
|
||||
this.reporter = reporter;
|
||||
this.failureTracker = new _failureTracker.FailureTracker(config);
|
||||
}
|
||||
}
|
||||
exports.TestRun = TestRun;
|
||||
async function runTasks(testRun, tasks, globalTimeout, cancelPromise) {
|
||||
const deadline = globalTimeout ? (0, _utils.monotonicTime)() + globalTimeout : 0;
|
||||
const taskRunner = new _taskRunner.TaskRunner(testRun.reporter, globalTimeout || 0);
|
||||
for (const task of tasks) taskRunner.addTask(task);
|
||||
testRun.reporter.onConfigure(testRun.config.config);
|
||||
const status = await taskRunner.run(testRun, deadline, cancelPromise);
|
||||
return await finishTaskRun(testRun, status);
|
||||
}
|
||||
async function runTasksDeferCleanup(testRun, tasks) {
|
||||
const taskRunner = new _taskRunner.TaskRunner(testRun.reporter, 0);
|
||||
for (const task of tasks) taskRunner.addTask(task);
|
||||
testRun.reporter.onConfigure(testRun.config.config);
|
||||
const {
|
||||
status,
|
||||
cleanup
|
||||
} = await taskRunner.runDeferCleanup(testRun, 0);
|
||||
return {
|
||||
status: await finishTaskRun(testRun, status),
|
||||
cleanup
|
||||
};
|
||||
}
|
||||
async function finishTaskRun(testRun, status) {
|
||||
if (status === 'passed') status = testRun.failureTracker.result();
|
||||
const modifiedResult = await testRun.reporter.onEnd({
|
||||
status
|
||||
});
|
||||
if (modifiedResult && modifiedResult.status) status = modifiedResult.status;
|
||||
await testRun.reporter.onExit();
|
||||
return status;
|
||||
}
|
||||
function createGlobalSetupTasks(config) {
|
||||
const tasks = [];
|
||||
if (!config.configCLIOverrides.preserveOutputDir && !process.env.PW_TEST_NO_REMOVE_OUTPUT_DIRS) tasks.push(createRemoveOutputDirsTask());
|
||||
tasks.push(...createPluginSetupTasks(config), ...config.globalTeardowns.map(file => createGlobalTeardownTask(file, config)).reverse(), ...config.globalSetups.map(file => createGlobalSetupTask(file, config)));
|
||||
return tasks;
|
||||
}
|
||||
function createRunTestsTasks(config) {
|
||||
return [createPhasesTask(), createReportBeginTask(), ...config.plugins.map(plugin => createPluginBeginTask(plugin)), createRunTestsTask()];
|
||||
}
|
||||
function createClearCacheTask(config) {
|
||||
return {
|
||||
title: 'clear cache',
|
||||
setup: async () => {
|
||||
await (0, _util2.removeDirAndLogToConsole)(_compilationCache.cacheDir);
|
||||
for (const plugin of config.plugins) {
|
||||
var _plugin$instance, _plugin$instance$clea;
|
||||
await ((_plugin$instance = plugin.instance) === null || _plugin$instance === void 0 || (_plugin$instance$clea = _plugin$instance.clearCache) === null || _plugin$instance$clea === void 0 ? void 0 : _plugin$instance$clea.call(_plugin$instance));
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
function createReportBeginTask() {
|
||||
return {
|
||||
title: 'report begin',
|
||||
setup: async testRun => {
|
||||
var _testRun$reporter$onB, _testRun$reporter;
|
||||
(_testRun$reporter$onB = (_testRun$reporter = testRun.reporter).onBegin) === null || _testRun$reporter$onB === void 0 || _testRun$reporter$onB.call(_testRun$reporter, testRun.rootSuite);
|
||||
},
|
||||
teardown: async ({}) => {}
|
||||
};
|
||||
}
|
||||
function createPluginSetupTasks(config) {
|
||||
return config.plugins.map(plugin => ({
|
||||
title: 'plugin setup',
|
||||
setup: async ({
|
||||
reporter
|
||||
}) => {
|
||||
var _plugin$instance2, _plugin$instance2$set;
|
||||
if (typeof plugin.factory === 'function') plugin.instance = await plugin.factory();else plugin.instance = plugin.factory;
|
||||
await ((_plugin$instance2 = plugin.instance) === null || _plugin$instance2 === void 0 || (_plugin$instance2$set = _plugin$instance2.setup) === null || _plugin$instance2$set === void 0 ? void 0 : _plugin$instance2$set.call(_plugin$instance2, config.config, config.configDir, reporter));
|
||||
},
|
||||
teardown: async () => {
|
||||
var _plugin$instance3, _plugin$instance3$tea;
|
||||
await ((_plugin$instance3 = plugin.instance) === null || _plugin$instance3 === void 0 || (_plugin$instance3$tea = _plugin$instance3.teardown) === null || _plugin$instance3$tea === void 0 ? void 0 : _plugin$instance3$tea.call(_plugin$instance3));
|
||||
}
|
||||
}));
|
||||
}
|
||||
function createPluginBeginTask(plugin) {
|
||||
return {
|
||||
title: 'plugin begin',
|
||||
setup: async testRun => {
|
||||
var _plugin$instance4, _plugin$instance4$beg;
|
||||
await ((_plugin$instance4 = plugin.instance) === null || _plugin$instance4 === void 0 || (_plugin$instance4$beg = _plugin$instance4.begin) === null || _plugin$instance4$beg === void 0 ? void 0 : _plugin$instance4$beg.call(_plugin$instance4, testRun.rootSuite));
|
||||
},
|
||||
teardown: async () => {
|
||||
var _plugin$instance5, _plugin$instance5$end;
|
||||
await ((_plugin$instance5 = plugin.instance) === null || _plugin$instance5 === void 0 || (_plugin$instance5$end = _plugin$instance5.end) === null || _plugin$instance5$end === void 0 ? void 0 : _plugin$instance5$end.call(_plugin$instance5));
|
||||
}
|
||||
};
|
||||
}
|
||||
function createGlobalSetupTask(file, config) {
|
||||
let title = 'global setup';
|
||||
if (config.globalSetups.length > 1) title += ` (${file})`;
|
||||
let globalSetupResult;
|
||||
return {
|
||||
title,
|
||||
setup: async ({
|
||||
config
|
||||
}) => {
|
||||
const setupHook = await (0, _loadUtils.loadGlobalHook)(config, file);
|
||||
globalSetupResult = await setupHook(config.config);
|
||||
},
|
||||
teardown: async () => {
|
||||
if (typeof globalSetupResult === 'function') await globalSetupResult();
|
||||
}
|
||||
};
|
||||
}
|
||||
function createGlobalTeardownTask(file, config) {
|
||||
let title = 'global teardown';
|
||||
if (config.globalTeardowns.length > 1) title += ` (${file})`;
|
||||
return {
|
||||
title,
|
||||
teardown: async ({
|
||||
config
|
||||
}) => {
|
||||
const teardownHook = await (0, _loadUtils.loadGlobalHook)(config, file);
|
||||
await teardownHook(config.config);
|
||||
}
|
||||
};
|
||||
}
|
||||
function createRemoveOutputDirsTask() {
|
||||
return {
|
||||
title: 'clear output',
|
||||
setup: async ({
|
||||
config
|
||||
}) => {
|
||||
const outputDirs = new Set();
|
||||
const projects = (0, _projectUtils.filterProjects)(config.projects, config.cliProjectFilter);
|
||||
projects.forEach(p => outputDirs.add(p.project.outputDir));
|
||||
await Promise.all(Array.from(outputDirs).map(outputDir => (0, _utils.removeFolders)([outputDir]).then(async ([error]) => {
|
||||
if (!error) return;
|
||||
if (error.code === 'EBUSY') {
|
||||
// We failed to remove folder, might be due to the whole folder being mounted inside a container:
|
||||
// https://github.com/microsoft/playwright/issues/12106
|
||||
// Do a best-effort to remove all files inside of it instead.
|
||||
const entries = await readDirAsync(outputDir).catch(e => []);
|
||||
await Promise.all(entries.map(entry => (0, _utils.removeFolders)([_path.default.join(outputDir, entry)])));
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
})));
|
||||
}
|
||||
};
|
||||
}
|
||||
function createListFilesTask() {
|
||||
return {
|
||||
title: 'load tests',
|
||||
setup: async (testRun, errors) => {
|
||||
testRun.rootSuite = await (0, _loadUtils.createRootSuite)(testRun, errors, false);
|
||||
testRun.failureTracker.onRootSuite(testRun.rootSuite);
|
||||
await (0, _loadUtils.collectProjectsAndTestFiles)(testRun, false);
|
||||
for (const [project, files] of testRun.projectFiles) {
|
||||
const projectSuite = new _test.Suite(project.project.name, 'project');
|
||||
projectSuite._fullProject = project;
|
||||
testRun.rootSuite._addSuite(projectSuite);
|
||||
const suites = files.map(file => {
|
||||
const title = _path.default.relative(testRun.config.config.rootDir, file);
|
||||
const suite = new _test.Suite(title, 'file');
|
||||
suite.location = {
|
||||
file,
|
||||
line: 0,
|
||||
column: 0
|
||||
};
|
||||
projectSuite._addSuite(suite);
|
||||
return suite;
|
||||
});
|
||||
testRun.projectSuites.set(project, suites);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
function createLoadTask(mode, options) {
|
||||
return {
|
||||
title: 'load tests',
|
||||
setup: async (testRun, errors, softErrors) => {
|
||||
await (0, _loadUtils.collectProjectsAndTestFiles)(testRun, !!options.doNotRunDepsOutsideProjectFilter);
|
||||
await (0, _loadUtils.loadFileSuites)(testRun, mode, options.failOnLoadErrors ? errors : softErrors);
|
||||
if (testRun.config.cliOnlyChanged || options.populateDependencies) {
|
||||
for (const plugin of testRun.config.plugins) {
|
||||
var _plugin$instance6, _plugin$instance6$pop;
|
||||
await ((_plugin$instance6 = plugin.instance) === null || _plugin$instance6 === void 0 || (_plugin$instance6$pop = _plugin$instance6.populateDependencies) === null || _plugin$instance6$pop === void 0 ? void 0 : _plugin$instance6$pop.call(_plugin$instance6));
|
||||
}
|
||||
}
|
||||
let cliOnlyChangedMatcher = undefined;
|
||||
if (testRun.config.cliOnlyChanged) {
|
||||
const changedFiles = await (0, _vcs.detectChangedTestFiles)(testRun.config.cliOnlyChanged, testRun.config.configDir);
|
||||
cliOnlyChangedMatcher = file => changedFiles.has(file);
|
||||
}
|
||||
testRun.rootSuite = await (0, _loadUtils.createRootSuite)(testRun, options.failOnLoadErrors ? errors : softErrors, !!options.filterOnly, cliOnlyChangedMatcher);
|
||||
testRun.failureTracker.onRootSuite(testRun.rootSuite);
|
||||
// Fail when no tests.
|
||||
if (options.failOnLoadErrors && !testRun.rootSuite.allTests().length && !testRun.config.cliPassWithNoTests && !testRun.config.config.shard && !testRun.config.cliOnlyChanged) {
|
||||
if (testRun.config.cliArgs.length) {
|
||||
throw new Error([`No tests found.`, `Make sure that arguments are regular expressions matching test files.`, `You may need to escape symbols like "$" or "*" and quote the arguments.`].join('\n'));
|
||||
}
|
||||
throw new Error(`No tests found`);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
function createApplyRebaselinesTask() {
|
||||
return {
|
||||
title: 'apply rebaselines',
|
||||
setup: async () => {
|
||||
(0, _rebase.clearSuggestedRebaselines)();
|
||||
},
|
||||
teardown: async ({
|
||||
config,
|
||||
reporter
|
||||
}) => {
|
||||
await (0, _rebase.applySuggestedRebaselines)(config, reporter);
|
||||
}
|
||||
};
|
||||
}
|
||||
function createPhasesTask() {
|
||||
return {
|
||||
title: 'create phases',
|
||||
setup: async testRun => {
|
||||
let maxConcurrentTestGroups = 0;
|
||||
const processed = new Set();
|
||||
const projectToSuite = new Map(testRun.rootSuite.suites.map(suite => [suite._fullProject, suite]));
|
||||
const allProjects = [...projectToSuite.keys()];
|
||||
const teardownToSetups = (0, _projectUtils.buildTeardownToSetupsMap)(allProjects);
|
||||
const teardownToSetupsDependents = new Map();
|
||||
for (const [teardown, setups] of teardownToSetups) {
|
||||
const closure = (0, _projectUtils.buildDependentProjects)(setups, allProjects);
|
||||
closure.delete(teardown);
|
||||
teardownToSetupsDependents.set(teardown, [...closure]);
|
||||
}
|
||||
for (let i = 0; i < projectToSuite.size; i++) {
|
||||
// Find all projects that have all their dependencies processed by previous phases.
|
||||
const phaseProjects = [];
|
||||
for (const project of projectToSuite.keys()) {
|
||||
if (processed.has(project)) continue;
|
||||
const projectsThatShouldFinishFirst = [...project.deps, ...(teardownToSetupsDependents.get(project) || [])];
|
||||
if (projectsThatShouldFinishFirst.find(p => !processed.has(p))) continue;
|
||||
phaseProjects.push(project);
|
||||
}
|
||||
|
||||
// Create a new phase.
|
||||
for (const project of phaseProjects) processed.add(project);
|
||||
if (phaseProjects.length) {
|
||||
let testGroupsInPhase = 0;
|
||||
const phase = {
|
||||
dispatcher: new _dispatcher.Dispatcher(testRun.config, testRun.reporter, testRun.failureTracker),
|
||||
projects: []
|
||||
};
|
||||
testRun.phases.push(phase);
|
||||
for (const project of phaseProjects) {
|
||||
const projectSuite = projectToSuite.get(project);
|
||||
const testGroups = (0, _testGroups.createTestGroups)(projectSuite, testRun.config.config.workers);
|
||||
phase.projects.push({
|
||||
project,
|
||||
projectSuite,
|
||||
testGroups
|
||||
});
|
||||
testGroupsInPhase += testGroups.length;
|
||||
}
|
||||
(0, _utilsBundle.debug)('pw:test:task')(`created phase #${testRun.phases.length} with ${phase.projects.map(p => p.project.project.name).sort()} projects, ${testGroupsInPhase} testGroups`);
|
||||
maxConcurrentTestGroups = Math.max(maxConcurrentTestGroups, testGroupsInPhase);
|
||||
}
|
||||
}
|
||||
testRun.config.config.metadata.actualWorkers = Math.min(testRun.config.config.workers, maxConcurrentTestGroups);
|
||||
}
|
||||
};
|
||||
}
|
||||
function createRunTestsTask() {
  return {
    title: 'test suite',
    setup: async ({
      phases,
      failureTracker
    }) => {
      const successfulProjects = new Set();
      const extraEnvByProjectId = new Map();
      const teardownToSetups = (0, _projectUtils.buildTeardownToSetupsMap)(phases.map(phase => phase.projects.map(p => p.project)).flat());
      for (const {
        dispatcher,
        projects
      } of phases) {
        // Each phase contains a dispatcher and a set of test groups.
        // We don't want to run the test groups belonging to the projects
        // that depend on the projects that failed previously.
        const phaseTestGroups = [];
        for (const {
          project,
          testGroups
        } of projects) {
          // Inherit extra environment variables from dependencies.
          let extraEnv = {};
          for (const dep of project.deps) extraEnv = {
            ...extraEnv,
            ...extraEnvByProjectId.get(dep.id)
          };
          for (const setup of teardownToSetups.get(project) || []) extraEnv = {
            ...extraEnv,
            ...extraEnvByProjectId.get(setup.id)
          };
          extraEnvByProjectId.set(project.id, extraEnv);
          const hasFailedDeps = project.deps.some(p => !successfulProjects.has(p));
          if (!hasFailedDeps) phaseTestGroups.push(...testGroups);
        }
        if (phaseTestGroups.length) {
          await dispatcher.run(phaseTestGroups, extraEnvByProjectId);
          await dispatcher.stop();
          for (const [projectId, envProduced] of dispatcher.producedEnvByProjectId()) {
            const extraEnv = extraEnvByProjectId.get(projectId) || {};
            extraEnvByProjectId.set(projectId, {
              ...extraEnv,
              ...envProduced
            });
          }
        }

        // If the worker broke, fail everything, we have no way of knowing which
        // projects failed.
        if (!failureTracker.hasWorkerErrors()) {
          for (const {
            project,
            projectSuite
          } of projects) {
            const hasFailedDeps = project.deps.some(p => !successfulProjects.has(p));
            if (!hasFailedDeps && !projectSuite.allTests().some(test => !test.ok())) successfulProjects.add(project);
          }
        }
      }
    },
    teardown: async ({
      phases
    }) => {
      for (const {
        dispatcher
      } of phases.reverse()) await dispatcher.stop();
    }
  };
}
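// Illustrative example (not part of the original source): if a hypothetical
// `setup` project sets process.env.API_TOKEN while it runs, the dispatcher
// surfaces it via producedEnvByProjectId(), and the merge above passes
// API_TOKEN on to every project that lists `setup` in its deps.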
function createStartDevServerTask() {
  return {
    title: 'start dev server',
    setup: async ({
      config
    }, errors, softErrors) => {
      if (config.plugins.some(plugin => !!plugin.devServerCleanup)) {
        errors.push({
          message: `DevServer is already running`
        });
        return;
      }
      for (const plugin of config.plugins) {
        var _plugin$instance7, _plugin$instance7$sta;
        plugin.devServerCleanup = await ((_plugin$instance7 = plugin.instance) === null || _plugin$instance7 === void 0 || (_plugin$instance7$sta = _plugin$instance7.startDevServer) === null || _plugin$instance7$sta === void 0 ? void 0 : _plugin$instance7$sta.call(_plugin$instance7));
      }
      if (!config.plugins.some(plugin => !!plugin.devServerCleanup)) errors.push({
        message: `DevServer is not available in the package you are using. Did you mean to use component testing?`
      });
    },
    teardown: async ({
      config
    }) => {
      for (const plugin of config.plugins) {
        var _plugin$devServerClea;
        await ((_plugin$devServerClea = plugin.devServerCleanup) === null || _plugin$devServerClea === void 0 ? void 0 : _plugin$devServerClea.call(plugin));
        plugin.devServerCleanup = undefined;
      }
    }
  };
}
136
node_modules/playwright/lib/runner/testGroups.js
generated
vendored
Normal file
@@ -0,0 +1,136 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createTestGroups = createTestGroups;
|
||||
exports.filterForShard = filterForShard;
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
function createTestGroups(projectSuite, expectedParallelism) {
|
||||
// This function groups tests that can be run together.
|
||||
// Tests cannot be run together when:
|
||||
// - They belong to different projects - requires different workers.
|
||||
// - They have a different repeatEachIndex - requires different workers.
|
||||
// - They have a different set of worker fixtures in the pool - requires different workers.
|
||||
// - They have a different requireFile - reuses the worker, but runs each requireFile separately.
|
||||
// - They belong to a parallel suite.
|
||||
|
||||
// Using the map "workerHash -> requireFile -> group" makes us preserve the natural order
|
||||
// of worker hashes and require files for the simple cases.
|
||||
const groups = new Map();
|
||||
const createGroup = test => {
|
||||
return {
|
||||
workerHash: test._workerHash,
|
||||
requireFile: test._requireFile,
|
||||
repeatEachIndex: test.repeatEachIndex,
|
||||
projectId: test._projectId,
|
||||
tests: []
|
||||
};
|
||||
};
|
||||
for (const test of projectSuite.allTests()) {
|
||||
let withWorkerHash = groups.get(test._workerHash);
|
||||
if (!withWorkerHash) {
|
||||
withWorkerHash = new Map();
|
||||
groups.set(test._workerHash, withWorkerHash);
|
||||
}
|
||||
let withRequireFile = withWorkerHash.get(test._requireFile);
|
||||
if (!withRequireFile) {
|
||||
withRequireFile = {
|
||||
general: createGroup(test),
|
||||
parallel: new Map(),
|
||||
parallelWithHooks: createGroup(test)
|
||||
};
|
||||
withWorkerHash.set(test._requireFile, withRequireFile);
|
||||
}
|
||||
|
||||
// Note that a parallel suite cannot be inside a serial suite. This is enforced in TestType.
|
||||
let insideParallel = false;
|
||||
let outerMostSequentialSuite;
|
||||
let hasAllHooks = false;
|
||||
for (let parent = test.parent; parent; parent = parent.parent) {
|
||||
if (parent._parallelMode === 'serial' || parent._parallelMode === 'default') outerMostSequentialSuite = parent;
|
||||
insideParallel = insideParallel || parent._parallelMode === 'parallel';
|
||||
hasAllHooks = hasAllHooks || parent._hooks.some(hook => hook.type === 'beforeAll' || hook.type === 'afterAll');
|
||||
}
|
||||
if (insideParallel) {
|
||||
if (hasAllHooks && !outerMostSequentialSuite) {
|
||||
withRequireFile.parallelWithHooks.tests.push(test);
|
||||
} else {
|
||||
const key = outerMostSequentialSuite || test;
|
||||
let group = withRequireFile.parallel.get(key);
|
||||
if (!group) {
|
||||
group = createGroup(test);
|
||||
withRequireFile.parallel.set(key, group);
|
||||
}
|
||||
group.tests.push(test);
|
||||
}
|
||||
} else {
|
||||
withRequireFile.general.tests.push(test);
|
||||
}
|
||||
}
|
||||
const result = [];
|
||||
for (const withWorkerHash of groups.values()) {
|
||||
for (const withRequireFile of withWorkerHash.values()) {
|
||||
// Tests without parallel mode should run serially as a single group.
|
||||
if (withRequireFile.general.tests.length) result.push(withRequireFile.general);
|
||||
|
||||
// Parallel test groups without beforeAll/afterAll can be run independently.
|
||||
result.push(...withRequireFile.parallel.values());
|
||||
|
||||
// Tests with beforeAll/afterAll should try to share workers as much as possible.
|
||||
const parallelWithHooksGroupSize = Math.ceil(withRequireFile.parallelWithHooks.tests.length / expectedParallelism);
|
||||
let lastGroup;
|
||||
for (const test of withRequireFile.parallelWithHooks.tests) {
|
||||
if (!lastGroup || lastGroup.tests.length >= parallelWithHooksGroupSize) {
|
||||
lastGroup = createGroup(test);
|
||||
result.push(lastGroup);
|
||||
}
|
||||
lastGroup.tests.push(test);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
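// Illustrative example (not part of the original source): a fully parallel
// file with 8 tests and expectedParallelism = 4 lands in the
// parallelWithHooks bucket when it declares a beforeAll hook, producing
// Math.ceil(8 / 4) = 2 tests per group, i.e. four two-test groups; without
// hooks, every test becomes its own single-test group.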
function filterForShard(shard, testGroups) {
  // Note that sharding works based on test groups.
  // This means parallel files will be sharded by single tests,
  // while non-parallel files will be sharded by the whole file.
  //
  // Shards are still balanced by the number of tests, not files,
  // even in the case of non-parallel files.

  let shardableTotal = 0;
  for (const group of testGroups) shardableTotal += group.tests.length;

  // Each shard gets some tests.
  const shardSize = Math.floor(shardableTotal / shard.total);
  // First few shards get one more test each.
  const extraOne = shardableTotal - shardSize * shard.total;
  const currentShard = shard.current - 1; // Make it zero-based for calculations.
  const from = shardSize * currentShard + Math.min(extraOne, currentShard);
  const to = from + shardSize + (currentShard < extraOne ? 1 : 0);
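  // Worked example (illustrative, not part of the original source): with
  // shardableTotal = 10 and shard.total = 3, shardSize = 3 and extraOne = 1,
  // so shard 1 covers [0; 4), shard 2 covers [4; 7) and shard 3 covers
  // [7; 10) - the first shard picks up the one extra test.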
  let current = 0;
  const result = new Set();
  for (const group of testGroups) {
    // Any test group goes to the shard that contains the first test of this group.
    // So, this shard gets any group that starts at [from; to).
    if (current >= from && current < to) result.add(group);
    current += group.tests.length;
  }
  return result;
}
575
node_modules/playwright/lib/runner/testServer.js
generated
vendored
Normal file
@@ -0,0 +1,575 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.TestServerDispatcher = void 0;
|
||||
exports.resolveCtDirs = resolveCtDirs;
|
||||
exports.runTestServer = runTestServer;
|
||||
exports.runUIMode = runUIMode;
|
||||
var _fs = _interopRequireDefault(require("fs"));
|
||||
var _path = _interopRequireDefault(require("path"));
|
||||
var _server = require("playwright-core/lib/server");
|
||||
var _utils = require("playwright-core/lib/utils");
|
||||
var _utilsBundle = require("playwright-core/lib/utilsBundle");
|
||||
var _reporters = require("./reporters");
|
||||
var _sigIntWatcher = require("./sigIntWatcher");
|
||||
var _tasks = require("./tasks");
|
||||
var _configLoader = require("../common/configLoader");
|
||||
var _fsWatcher = require("../fsWatcher");
|
||||
var _teleReceiver = require("../isomorphic/teleReceiver");
|
||||
var _gitCommitInfoPlugin = require("../plugins/gitCommitInfoPlugin");
|
||||
var _webServerPlugin = require("../plugins/webServerPlugin");
|
||||
var _base = require("../reporters/base");
|
||||
var _internalReporter = require("../reporters/internalReporter");
|
||||
var _list = _interopRequireDefault(require("../reporters/list"));
|
||||
var _compilationCache = require("../transform/compilationCache");
|
||||
var _util = require("../util");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
const originalStdoutWrite = process.stdout.write;
|
||||
const originalStderrWrite = process.stderr.write;
|
||||
class TestServer {
|
||||
constructor(configLocation, configCLIOverrides) {
|
||||
this._configLocation = void 0;
|
||||
this._configCLIOverrides = void 0;
|
||||
this._dispatcher = void 0;
|
||||
this._configLocation = configLocation;
|
||||
this._configCLIOverrides = configCLIOverrides;
|
||||
}
|
||||
async start(options) {
|
||||
this._dispatcher = new TestServerDispatcher(this._configLocation, this._configCLIOverrides);
|
||||
return await (0, _server.startTraceViewerServer)({
|
||||
...options,
|
||||
transport: this._dispatcher.transport
|
||||
});
|
||||
}
|
||||
async stop() {
|
||||
var _this$_dispatcher, _this$_dispatcher2;
|
||||
await ((_this$_dispatcher = this._dispatcher) === null || _this$_dispatcher === void 0 ? void 0 : _this$_dispatcher._setInterceptStdio(false));
|
||||
await ((_this$_dispatcher2 = this._dispatcher) === null || _this$_dispatcher2 === void 0 ? void 0 : _this$_dispatcher2.runGlobalTeardown());
|
||||
}
|
||||
}
|
||||
class TestServerDispatcher {
  constructor(configLocation, configCLIOverrides) {
    this._configLocation = void 0;
    this._configCLIOverrides = void 0;
    this._watcher = void 0;
    this._watchedProjectDirs = new Set();
    this._ignoredProjectOutputs = new Set();
    this._watchedTestDependencies = new Set();
    this._testRun = void 0;
    this.transport = void 0;
    this._queue = Promise.resolve();
    this._globalSetup = void 0;
    this._devServer = void 0;
    this._dispatchEvent = void 0;
    this._plugins = void 0;
    this._serializer = require.resolve('./uiModeReporter');
    this._watchTestDirs = false;
    this._closeOnDisconnect = false;
    this._populateDependenciesOnList = false;
    this._configLocation = configLocation;
    this._configCLIOverrides = configCLIOverrides;
    this.transport = {
      onconnect: () => {},
      dispatch: (method, params) => this[method](params),
      onclose: () => {
        if (this._closeOnDisconnect) (0, _utils.gracefullyProcessExitDoNotHang)(0);
      }
    };
    this._watcher = new _fsWatcher.Watcher(events => {
      const collector = new Set();
      events.forEach(f => (0, _compilationCache.collectAffectedTestFiles)(f.file, collector));
      this._dispatchEvent('testFilesChanged', {
        testFiles: [...collector]
      });
    });
    this._dispatchEvent = (method, params) => {
      var _this$transport$sendE, _this$transport;
      return (_this$transport$sendE = (_this$transport = this.transport).sendEvent) === null || _this$transport$sendE === void 0 ? void 0 : _this$transport$sendE.call(_this$transport, method, params);
    };
  }
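  // Illustrative example (not part of the original source): an incoming
  // message such as { method: 'listTests', params: { projects: ['chromium'] } }
  // is routed by transport.dispatch straight to this.listTests(params), and
  // server-side events travel back through _dispatchEvent -> transport.sendEvent.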
  async _wireReporter(messageSink) {
    return await (0, _reporters.createReporterForTestServer)(this._serializer, messageSink);
  }
  async _collectingInternalReporter(...extraReporters) {
    const report = [];
    const collectingReporter = await (0, _reporters.createReporterForTestServer)(this._serializer, e => report.push(e));
    return {
      reporter: new _internalReporter.InternalReporter([collectingReporter, ...extraReporters]),
      report
    };
  }
  async initialize(params) {
    // Note: this method can be called multiple times, for example from a new connection after UI mode reload.
    this._serializer = params.serializer || require.resolve('./uiModeReporter');
    this._closeOnDisconnect = !!params.closeOnDisconnect;
    await this._setInterceptStdio(!!params.interceptStdio);
    this._watchTestDirs = !!params.watchTestDirs;
    this._populateDependenciesOnList = !!params.populateDependenciesOnList;
  }
  async ping() {}
  async open(params) {
    if ((0, _utils.isUnderTest)()) return;
    // eslint-disable-next-line no-console
    (0, _utilsBundle.open)('vscode://file/' + params.location.file + ':' + params.location.line).catch(e => console.error(e));
  }
  async resizeTerminal(params) {
    process.stdout.columns = params.cols;
    process.stdout.rows = params.rows;
    process.stderr.columns = params.cols;
    process.stderr.rows = params.rows;
  }
  async checkBrowsers() {
    return {
      hasBrowsers: hasSomeBrowsers()
    };
  }
  async installBrowsers() {
    await installBrowsers();
  }
  async runGlobalSetup(params) {
    await this.runGlobalTeardown();
    const {
      reporter,
      report
    } = await this._collectingInternalReporter(new _list.default());
    const config = await this._loadConfigOrReportError(reporter, this._configCLIOverrides);
    if (!config) return {
      status: 'failed',
      report
    };
    const {
      status,
      cleanup
    } = await (0, _tasks.runTasksDeferCleanup)(new _tasks.TestRun(config, reporter), [...(0, _tasks.createGlobalSetupTasks)(config)]);
    if (status !== 'passed') await cleanup();else this._globalSetup = {
      cleanup,
      report
    };
    return {
      report,
      status
    };
  }
  async runGlobalTeardown() {
    const globalSetup = this._globalSetup;
    const status = await (globalSetup === null || globalSetup === void 0 ? void 0 : globalSetup.cleanup());
    this._globalSetup = undefined;
    return {
      status,
      report: (globalSetup === null || globalSetup === void 0 ? void 0 : globalSetup.report) || []
    };
  }
  async startDevServer(params) {
    await this.stopDevServer({});
    const {
      reporter,
      report
    } = await this._collectingInternalReporter();
    const config = await this._loadConfigOrReportError(reporter);
    if (!config) return {
      report,
      status: 'failed'
    };
    const {
      status,
      cleanup
    } = await (0, _tasks.runTasksDeferCleanup)(new _tasks.TestRun(config, reporter), [(0, _tasks.createLoadTask)('out-of-process', {
      failOnLoadErrors: true,
      filterOnly: false
    }), (0, _tasks.createStartDevServerTask)()]);
    if (status !== 'passed') await cleanup();else this._devServer = {
      cleanup,
      report
    };
    return {
      report,
      status
    };
  }
  async stopDevServer(params) {
    const devServer = this._devServer;
    const status = await (devServer === null || devServer === void 0 ? void 0 : devServer.cleanup());
    this._devServer = undefined;
    return {
      status,
      report: (devServer === null || devServer === void 0 ? void 0 : devServer.report) || []
    };
  }
  async clearCache(params) {
    const reporter = new _internalReporter.InternalReporter([]);
    const config = await this._loadConfigOrReportError(reporter);
    if (!config) return;
    await (0, _tasks.runTasks)(new _tasks.TestRun(config, reporter), [(0, _tasks.createClearCacheTask)(config)]);
  }
  async listFiles(params) {
    var _params$projects;
    const {
      reporter,
      report
    } = await this._collectingInternalReporter();
    const config = await this._loadConfigOrReportError(reporter);
    if (!config) return {
      status: 'failed',
      report
    };
    config.cliProjectFilter = (_params$projects = params.projects) !== null && _params$projects !== void 0 && _params$projects.length ? params.projects : undefined;
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(config, reporter), [(0, _tasks.createListFilesTask)(), (0, _tasks.createReportBeginTask)()]);
    return {
      report,
      status
    };
  }
  async listTests(params) {
    let result;
    this._queue = this._queue.then(async () => {
      const {
        config,
        report,
        status
      } = await this._innerListTests(params);
      if (config) await this._updateWatchedDirs(config);
      result = {
        report,
        status
      };
    }).catch(printInternalError);
    await this._queue;
    return result;
  }
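  // Illustrative note (not part of the original source): chaining onto
  // this._queue serializes requests, so a listTests call that arrives while
  // another list or run is in flight only starts after the previous promise
  // settles.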
  async _innerListTests(params) {
    var _params$projects2;
    const overrides = {
      ...this._configCLIOverrides,
      repeatEach: 1,
      retries: 0
    };
    const {
      reporter,
      report
    } = await this._collectingInternalReporter();
    const config = await this._loadConfigOrReportError(reporter, overrides);
    if (!config) return {
      report,
      reporter,
      status: 'failed'
    };
    config.cliArgs = params.locations || [];
    config.cliGrep = params.grep;
    config.cliGrepInvert = params.grepInvert;
    config.cliProjectFilter = (_params$projects2 = params.projects) !== null && _params$projects2 !== void 0 && _params$projects2.length ? params.projects : undefined;
    config.cliListOnly = true;
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(config, reporter), [(0, _tasks.createLoadTask)('out-of-process', {
      failOnLoadErrors: false,
      filterOnly: false,
      populateDependencies: this._populateDependenciesOnList
    }), (0, _tasks.createReportBeginTask)()]);
    return {
      config,
      report,
      reporter,
      status
    };
  }
  async _updateWatchedDirs(config) {
    this._watchedProjectDirs = new Set();
    this._ignoredProjectOutputs = new Set();
    for (const p of config.projects) {
      this._watchedProjectDirs.add(p.project.testDir);
      this._ignoredProjectOutputs.add(p.project.outputDir);
    }
    const result = await resolveCtDirs(config);
    if (result) {
      this._watchedProjectDirs.add(result.templateDir);
      this._ignoredProjectOutputs.add(result.outDir);
    }
    if (this._watchTestDirs) await this._updateWatcher(false);
  }
  async _updateWatcher(reportPending) {
    await this._watcher.update([...this._watchedProjectDirs, ...this._watchedTestDependencies], [...this._ignoredProjectOutputs], reportPending);
  }
  async runTests(params) {
    let result = {
      status: 'passed'
    };
    this._queue = this._queue.then(async () => {
      result = await this._innerRunTests(params).catch(e => {
        printInternalError(e);
        return {
          status: 'failed'
        };
      });
    });
    await this._queue;
    return result;
  }
  async _innerRunTests(params) {
    var _params$projects3;
    await this.stopTests();
    const overrides = {
      ...this._configCLIOverrides,
      repeatEach: 1,
      retries: 0,
      preserveOutputDir: true,
      reporter: params.reporters ? params.reporters.map(r => [r]) : undefined,
      use: {
        ...this._configCLIOverrides.use,
        ...(params.trace === 'on' ? {
          trace: {
            mode: 'on',
            sources: false,
            _live: true
          }
        } : {}),
        ...(params.trace === 'off' ? {
          trace: 'off'
        } : {}),
        ...(params.video === 'on' || params.video === 'off' ? {
          video: params.video
        } : {}),
        ...(params.headed !== undefined ? {
          headless: !params.headed
        } : {}),
        _optionContextReuseMode: params.reuseContext ? 'when-possible' : undefined,
        _optionConnectOptions: params.connectWsEndpoint ? {
          wsEndpoint: params.connectWsEndpoint
        } : undefined
      },
      ...(params.updateSnapshots ? {
        updateSnapshots: params.updateSnapshots
      } : {}),
      ...(params.updateSourceMethod ? {
        updateSourceMethod: params.updateSourceMethod
      } : {}),
      ...(params.workers ? {
        workers: params.workers
      } : {})
    };
    if (params.trace === 'on') process.env.PW_LIVE_TRACE_STACKS = '1';else process.env.PW_LIVE_TRACE_STACKS = undefined;
    const wireReporter = await this._wireReporter(e => this._dispatchEvent('report', e));
    const config = await this._loadConfigOrReportError(new _internalReporter.InternalReporter([wireReporter]), overrides);
    if (!config) return {
      status: 'failed'
    };
    const testIdSet = params.testIds ? new Set(params.testIds) : null;
    config.cliListOnly = false;
    config.cliPassWithNoTests = true;
    config.cliArgs = params.locations || [];
    config.cliGrep = params.grep;
    config.cliGrepInvert = params.grepInvert;
    config.cliProjectFilter = (_params$projects3 = params.projects) !== null && _params$projects3 !== void 0 && _params$projects3.length ? params.projects : undefined;
    config.testIdMatcher = testIdSet ? id => testIdSet.has(id) : undefined;
    const configReporters = await (0, _reporters.createReporters)(config, 'test', true);
    const reporter = new _internalReporter.InternalReporter([...configReporters, wireReporter]);
    const stop = new _utils.ManualPromise();
    const tasks = [(0, _tasks.createApplyRebaselinesTask)(), (0, _tasks.createLoadTask)('out-of-process', {
      filterOnly: true,
      failOnLoadErrors: false,
      doNotRunDepsOutsideProjectFilter: true
    }), ...(0, _tasks.createRunTestsTasks)(config)];
    const run = (0, _tasks.runTasks)(new _tasks.TestRun(config, reporter), tasks, 0, stop).then(async status => {
      this._testRun = undefined;
      return status;
    });
    this._testRun = {
      run,
      stop
    };
    return {
      status: await run
    };
  }
  async watch(params) {
    this._watchedTestDependencies = new Set();
    for (const fileName of params.fileNames) {
      this._watchedTestDependencies.add(fileName);
      (0, _compilationCache.dependenciesForTestFile)(fileName).forEach(file => this._watchedTestDependencies.add(file));
    }
    await this._updateWatcher(true);
  }
  async findRelatedTestFiles(params) {
    const errorReporter = (0, _reporters.createErrorCollectingReporter)(_base.internalScreen);
    const reporter = new _internalReporter.InternalReporter([errorReporter]);
    const config = await this._loadConfigOrReportError(reporter);
    if (!config) return {
      errors: errorReporter.errors(),
      testFiles: []
    };
    const status = await (0, _tasks.runTasks)(new _tasks.TestRun(config, reporter), [(0, _tasks.createLoadTask)('out-of-process', {
      failOnLoadErrors: true,
      filterOnly: false,
      populateDependencies: true
    })]);
    if (status !== 'passed') return {
      errors: errorReporter.errors(),
      testFiles: []
    };
    return {
      testFiles: (0, _compilationCache.affectedTestFiles)(params.files)
    };
  }
  async stopTests() {
    var _this$_testRun, _this$_testRun2;
    (_this$_testRun = this._testRun) === null || _this$_testRun === void 0 || (_this$_testRun = _this$_testRun.stop) === null || _this$_testRun === void 0 || _this$_testRun.resolve();
    await ((_this$_testRun2 = this._testRun) === null || _this$_testRun2 === void 0 ? void 0 : _this$_testRun2.run);
  }
  async _setInterceptStdio(intercept) {
    if (process.env.PWTEST_DEBUG) return;
    if (intercept) {
      process.stdout.write = chunk => {
        this._dispatchEvent('stdio', chunkToPayload('stdout', chunk));
        return true;
      };
      process.stderr.write = chunk => {
        this._dispatchEvent('stdio', chunkToPayload('stderr', chunk));
        return true;
      };
    } else {
      process.stdout.write = originalStdoutWrite;
      process.stderr.write = originalStderrWrite;
    }
  }
  async closeGracefully() {
    (0, _utils.gracefullyProcessExitDoNotHang)(0);
  }
  async _loadConfig(overrides) {
    try {
      const config = await (0, _configLoader.loadConfig)(this._configLocation, overrides);
      // Preserve plugin instances between setup and build.
      if (!this._plugins) {
        (0, _webServerPlugin.webServerPluginsForConfig)(config).forEach(p => config.plugins.push({
          factory: p
        }));
        (0, _gitCommitInfoPlugin.addGitCommitInfoPlugin)(config);
        this._plugins = config.plugins || [];
      } else {
        config.plugins.splice(0, config.plugins.length, ...this._plugins);
      }
      return {
        config
      };
    } catch (e) {
      return {
        config: null,
        error: (0, _util.serializeError)(e)
      };
    }
  }
  async _loadConfigOrReportError(reporter, overrides) {
    const {
      config,
      error
    } = await this._loadConfig(overrides);
    if (config) return config;
    // Produce dummy config when it has an error.
    reporter.onConfigure(_teleReceiver.baseFullConfig);
    reporter.onError(error);
    await reporter.onEnd({
      status: 'failed'
    });
    await reporter.onExit();
    return null;
  }
}
exports.TestServerDispatcher = TestServerDispatcher;
async function runUIMode(configFile, configCLIOverrides, options) {
  const configLocation = (0, _configLoader.resolveConfigLocation)(configFile);
  return await innerRunTestServer(configLocation, configCLIOverrides, options, async (server, cancelPromise) => {
    await (0, _server.installRootRedirect)(server, [], {
      ...options,
      webApp: 'uiMode.html'
    });
    if (options.host !== undefined || options.port !== undefined) {
      await (0, _server.openTraceInBrowser)(server.urlPrefix('human-readable'));
    } else {
      const page = await (0, _server.openTraceViewerApp)(server.urlPrefix('precise'), 'chromium', {
        headless: (0, _utils.isUnderTest)() && process.env.PWTEST_HEADED_FOR_TEST !== '1',
        persistentContextOptions: {
          handleSIGINT: false
        }
      });
      page.on('close', () => cancelPromise.resolve());
    }
  });
}
async function runTestServer(configFile, configCLIOverrides, options) {
  const configLocation = (0, _configLoader.resolveConfigLocation)(configFile);
  return await innerRunTestServer(configLocation, configCLIOverrides, options, async server => {
    // eslint-disable-next-line no-console
    console.log('Listening on ' + server.urlPrefix('precise').replace('http:', 'ws:') + '/' + server.wsGuid());
  });
}
async function innerRunTestServer(configLocation, configCLIOverrides, options, openUI) {
  if ((0, _configLoader.restartWithExperimentalTsEsm)(undefined, true)) return 'restarted';
  const testServer = new TestServer(configLocation, configCLIOverrides);
  const cancelPromise = new _utils.ManualPromise();
  const sigintWatcher = new _sigIntWatcher.SigIntWatcher();
  process.stdin.on('close', () => (0, _utils.gracefullyProcessExitDoNotHang)(0));
  void sigintWatcher.promise().then(() => cancelPromise.resolve());
  try {
    const server = await testServer.start(options);
    await openUI(server, cancelPromise, configLocation);
    await cancelPromise;
  } finally {
    await testServer.stop();
    sigintWatcher.disarm();
  }
  return sigintWatcher.hadSignal() ? 'interrupted' : 'passed';
}
function chunkToPayload(type, chunk) {
  if (chunk instanceof Uint8Array) return {
    type,
    buffer: chunk.toString('base64')
  };
  return {
    type,
    text: chunk
  };
}
function hasSomeBrowsers() {
  for (const browserName of ['chromium', 'webkit', 'firefox']) {
    try {
      _server.registry.findExecutable(browserName).executablePathOrDie('javascript');
      return true;
    } catch {}
  }
  return false;
}
async function installBrowsers() {
  const executables = _server.registry.defaultExecutables();
  await _server.registry.install(executables, false);
}
function printInternalError(e) {
  // eslint-disable-next-line no-console
  console.error('Internal error:', e);
}

// TODO: remove CT dependency.
async function resolveCtDirs(config) {
  const use = config.config.projects[0].use;
  const relativeTemplateDir = use.ctTemplateDir || 'playwright';
  const templateDir = await _fs.default.promises.realpath(_path.default.normalize(_path.default.join(config.configDir, relativeTemplateDir))).catch(() => undefined);
  if (!templateDir) return null;
  const outDir = use.ctCacheDir ? _path.default.resolve(config.configDir, use.ctCacheDir) : _path.default.resolve(templateDir, '.cache');
  return {
    outDir,
    templateDir
  };
}
31
node_modules/playwright/lib/runner/uiModeReporter.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = void 0;
|
||||
var _teleEmitter = require("../reporters/teleEmitter");
|
||||
/**
|
||||
* Copyright (c) Microsoft Corporation.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
class UIModeReporter extends _teleEmitter.TeleReporterEmitter {
|
||||
constructor(options) {
|
||||
super(options._send, {
|
||||
omitBuffers: true
|
||||
});
|
||||
}
|
||||
}
|
||||
var _default = exports.default = UIModeReporter;
|
||||
55
node_modules/playwright/lib/runner/vcs.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.detectChangedTestFiles = detectChangedTestFiles;
|
||||
var _child_process = _interopRequireDefault(require("child_process"));
|
||||
var _path = _interopRequireDefault(require("path"));
|
||||
var _compilationCache = require("../transform/compilationCache");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
async function detectChangedTestFiles(baseCommit, configDir) {
|
||||
function gitFileList(command) {
|
||||
try {
|
||||
return _child_process.default.execSync(`git ${command}`, {
|
||||
encoding: 'utf-8',
|
||||
stdio: 'pipe',
|
||||
cwd: configDir
|
||||
}).split('\n').filter(Boolean);
|
||||
} catch (_error) {
|
||||
const error = _error;
|
||||
const unknownRevision = error.output.some(line => line === null || line === void 0 ? void 0 : line.includes('unknown revision'));
|
||||
if (unknownRevision) {
|
||||
const isShallowClone = _child_process.default.execSync('git rev-parse --is-shallow-repository', {
|
||||
encoding: 'utf-8',
|
||||
stdio: 'pipe',
|
||||
cwd: configDir
|
||||
}).trim() === 'true';
|
||||
if (isShallowClone) {
|
||||
throw new Error([`The repository is a shallow clone and does not have '${baseCommit}' available locally.`, `Note that GitHub Actions checkout is shallow by default: https://github.com/actions/checkout`].join('\n'));
|
||||
}
|
||||
}
|
||||
throw new Error([`Cannot detect changed files for --only-changed mode:`, `git ${command}`, '', ...error.output].join('\n'));
|
||||
}
|
||||
}
|
||||
const untrackedFiles = gitFileList(`ls-files --others --exclude-standard`).map(file => _path.default.join(configDir, file));
|
||||
const [gitRoot] = gitFileList('rev-parse --show-toplevel');
|
||||
const trackedFilesWithChanges = gitFileList(`diff ${baseCommit} --name-only`).map(file => _path.default.join(gitRoot, file));
|
||||
return new Set((0, _compilationCache.affectedTestFiles)([...untrackedFiles, ...trackedFilesWithChanges]));
|
||||
}
|
||||
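// Illustrative example (not part of the original source): with baseCommit
// 'main', the function shells out to `git diff main --name-only` and
// `git ls-files --others --exclude-standard`, then maps the union of changed
// and untracked files onto the test files that (transitively) import them.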
422
node_modules/playwright/lib/runner/watchMode.js
generated
vendored
Normal file
@@ -0,0 +1,422 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.runWatchModeLoop = runWatchModeLoop;
|
||||
var _path = _interopRequireDefault(require("path"));
|
||||
var _readline = _interopRequireDefault(require("readline"));
|
||||
var _stream = require("stream");
|
||||
var _playwrightServer = require("playwright-core/lib/remote/playwrightServer");
|
||||
var _utils = require("playwright-core/lib/utils");
|
||||
var _base = require("../reporters/base");
|
||||
var _utilsBundle = require("../utilsBundle");
|
||||
var _testServer = require("./testServer");
|
||||
var _configLoader = require("../common/configLoader");
|
||||
var _teleSuiteUpdater = require("../isomorphic/teleSuiteUpdater");
|
||||
var _testServerConnection = require("../isomorphic/testServerConnection");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
class InMemoryTransport extends _stream.EventEmitter {
|
||||
constructor(send) {
|
||||
super();
|
||||
this._send = void 0;
|
||||
this._send = send;
|
||||
}
|
||||
close() {
|
||||
this.emit('close');
|
||||
}
|
||||
onclose(listener) {
|
||||
this.on('close', listener);
|
||||
}
|
||||
onerror(listener) {
|
||||
// no-op to fulfil the interface, the user of InMemoryTransport doesn't emit any errors.
|
||||
}
|
||||
onmessage(listener) {
|
||||
this.on('message', listener);
|
||||
}
|
||||
onopen(listener) {
|
||||
this.on('open', listener);
|
||||
}
|
||||
send(data) {
|
||||
this._send(data);
|
||||
}
|
||||
}
|
||||
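// Illustrative example (not part of the original source): runWatchModeLoop
// below wires this transport to a TestServerDispatcher in the same process,
// so a call like testServerConnection.listTests({}) becomes a JSON message
// that is dispatched directly, with no WebSocket in between.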
async function runWatchModeLoop(configLocation, initialOptions) {
  if ((0, _configLoader.restartWithExperimentalTsEsm)(undefined, true)) return 'restarted';
  const options = {
    ...initialOptions
  };
  let bufferMode = false;
  const testServerDispatcher = new _testServer.TestServerDispatcher(configLocation, {});
  const transport = new InMemoryTransport(async data => {
    const {
      id,
      method,
      params
    } = JSON.parse(data);
    try {
      const result = await testServerDispatcher.transport.dispatch(method, params);
      transport.emit('message', JSON.stringify({
        id,
        result
      }));
    } catch (e) {
      transport.emit('message', JSON.stringify({
        id,
        error: String(e)
      }));
    }
  });
  testServerDispatcher.transport.sendEvent = (method, params) => {
    transport.emit('message', JSON.stringify({
      method,
      params
    }));
  };
  const testServerConnection = new _testServerConnection.TestServerConnection(transport);
  transport.emit('open');
  const teleSuiteUpdater = new _teleSuiteUpdater.TeleSuiteUpdater({
    pathSeparator: _path.default.sep,
    onUpdate() {}
  });
  const dirtyTestFiles = new Set();
  const dirtyTestIds = new Set();
  let onDirtyTests = new _utils.ManualPromise();
  let queue = Promise.resolve();
  const changedFiles = new Set();
  testServerConnection.onTestFilesChanged(({
    testFiles
  }) => {
    testFiles.forEach(file => changedFiles.add(file));
    queue = queue.then(async () => {
      if (changedFiles.size === 0) return;
      const {
        report
      } = await testServerConnection.listTests({
        locations: options.files,
        projects: options.projects,
        grep: options.grep
      });
      teleSuiteUpdater.processListReport(report);
      for (const test of teleSuiteUpdater.rootSuite.allTests()) {
        if (changedFiles.has(test.location.file)) {
          dirtyTestFiles.add(test.location.file);
          dirtyTestIds.add(test.id);
        }
      }
      changedFiles.clear();
      if (dirtyTestIds.size > 0) {
        onDirtyTests.resolve('changed');
        onDirtyTests = new _utils.ManualPromise();
      }
    });
  });
  testServerConnection.onReport(report => teleSuiteUpdater.processTestReportEvent(report));
  await testServerConnection.initialize({
    interceptStdio: false,
    watchTestDirs: true,
    populateDependenciesOnList: true
  });
  await testServerConnection.runGlobalSetup({});
  const {
    report
  } = await testServerConnection.listTests({});
  teleSuiteUpdater.processListReport(report);
  const projectNames = teleSuiteUpdater.rootSuite.suites.map(s => s.title);
  let lastRun = {
    type: 'regular'
  };
  let result = 'passed';
  while (true) {
    if (bufferMode) printBufferPrompt(dirtyTestFiles, teleSuiteUpdater.config.rootDir);else printPrompt();
    const waitForCommand = readCommand();
    const command = await Promise.race([onDirtyTests, waitForCommand.result]);
    if (command === 'changed') waitForCommand.cancel();
    if (bufferMode && command === 'changed') continue;
    const shouldRunChangedFiles = bufferMode ? command === 'run' : command === 'changed';
    if (shouldRunChangedFiles) {
      if (dirtyTestIds.size === 0) continue;
      const testIds = [...dirtyTestIds];
      dirtyTestIds.clear();
      dirtyTestFiles.clear();
      await runTests(options, testServerConnection, {
        testIds,
        title: 'files changed'
      });
      lastRun = {
        type: 'changed',
        dirtyTestIds: testIds
      };
      continue;
    }
    if (command === 'run') {
      // All means reset filters.
      await runTests(options, testServerConnection);
      lastRun = {
        type: 'regular'
      };
      continue;
    }
    if (command === 'project') {
      const {
        selectedProjects
      } = await _utilsBundle.enquirer.prompt({
        type: 'multiselect',
        name: 'selectedProjects',
        message: 'Select projects',
        choices: projectNames
      }).catch(() => ({
        selectedProjects: null
      }));
      if (!selectedProjects) continue;
      options.projects = selectedProjects.length ? selectedProjects : undefined;
      await runTests(options, testServerConnection);
      lastRun = {
        type: 'regular'
      };
      continue;
    }
    if (command === 'file') {
      const {
        filePattern
      } = await _utilsBundle.enquirer.prompt({
        type: 'text',
        name: 'filePattern',
        message: 'Input filename pattern (regex)'
      }).catch(() => ({
        filePattern: null
      }));
      if (filePattern === null) continue;
      if (filePattern.trim()) options.files = filePattern.split(' ');else options.files = undefined;
      await runTests(options, testServerConnection);
      lastRun = {
        type: 'regular'
      };
      continue;
    }
    if (command === 'grep') {
      const {
        testPattern
      } = await _utilsBundle.enquirer.prompt({
        type: 'text',
        name: 'testPattern',
        message: 'Input test name pattern (regex)'
      }).catch(() => ({
        testPattern: null
      }));
      if (testPattern === null) continue;
      if (testPattern.trim()) options.grep = testPattern;else options.grep = undefined;
      await runTests(options, testServerConnection);
      lastRun = {
        type: 'regular'
      };
      continue;
    }
    if (command === 'failed') {
      const failedTestIds = teleSuiteUpdater.rootSuite.allTests().filter(t => !t.ok()).map(t => t.id);
      await runTests({}, testServerConnection, {
        title: 'running failed tests',
        testIds: failedTestIds
      });
      lastRun = {
        type: 'failed',
        failedTestIds
      };
      continue;
    }
    if (command === 'repeat') {
      if (lastRun.type === 'regular') {
        await runTests(options, testServerConnection, {
          title: 're-running tests'
        });
        continue;
      } else if (lastRun.type === 'changed') {
        await runTests(options, testServerConnection, {
          title: 're-running tests',
          testIds: lastRun.dirtyTestIds
        });
      } else if (lastRun.type === 'failed') {
        await runTests({}, testServerConnection, {
          title: 're-running tests',
          testIds: lastRun.failedTestIds
        });
      }
      continue;
    }
    if (command === 'toggle-show-browser') {
      await toggleShowBrowser();
      continue;
    }
    if (command === 'toggle-buffer-mode') {
      bufferMode = !bufferMode;
      continue;
    }
    if (command === 'exit') break;
    if (command === 'interrupted') {
      result = 'interrupted';
      break;
    }
  }
  const teardown = await testServerConnection.runGlobalTeardown({});
  return result === 'passed' ? teardown.status : result;
}
function readKeyPress(handler) {
  const promise = new _utils.ManualPromise();
  const rl = _readline.default.createInterface({
    input: process.stdin,
    escapeCodeTimeout: 50
  });
  _readline.default.emitKeypressEvents(process.stdin, rl);
  if (process.stdin.isTTY) process.stdin.setRawMode(true);
  const listener = _utils.eventsHelper.addEventListener(process.stdin, 'keypress', (text, key) => {
    const result = handler(text, key);
    if (result) promise.resolve(result);
  });
  const cancel = () => {
    _utils.eventsHelper.removeEventListeners([listener]);
    rl.close();
    if (process.stdin.isTTY) process.stdin.setRawMode(false);
  };
  void promise.finally(cancel);
  return {
    result: promise,
    cancel
  };
}
const isInterrupt = (text, key) => text === '\x03' || text === '\x1B' || key && key.name === 'escape' || key && key.ctrl && key.name === 'c';
async function runTests(watchOptions, testServerConnection, options) {
  printConfiguration(watchOptions, options === null || options === void 0 ? void 0 : options.title);
  const waitForDone = readKeyPress((text, key) => {
    if (isInterrupt(text, key)) {
      testServerConnection.stopTestsNoReply({});
      return 'done';
    }
  });
  await testServerConnection.runTests({
    grep: watchOptions.grep,
    testIds: options === null || options === void 0 ? void 0 : options.testIds,
    locations: watchOptions === null || watchOptions === void 0 ? void 0 : watchOptions.files,
    projects: watchOptions.projects,
    connectWsEndpoint,
    reuseContext: connectWsEndpoint ? true : undefined,
    workers: connectWsEndpoint ? 1 : undefined,
    headed: connectWsEndpoint ? true : undefined
  }).finally(() => waitForDone.cancel());
}
function readCommand() {
  return readKeyPress((text, key) => {
    if (isInterrupt(text, key)) return 'interrupted';
    if (process.platform !== 'win32' && key && key.ctrl && key.name === 'z') {
      process.kill(process.ppid, 'SIGTSTP');
      process.kill(process.pid, 'SIGTSTP');
    }
    const name = key === null || key === void 0 ? void 0 : key.name;
    if (name === 'q') return 'exit';
    if (name === 'h') {
      process.stdout.write(`${(0, _base.separator)(_base.terminalScreen)}
Run tests
${_utils.colors.bold('enter')} ${_utils.colors.dim('run tests')}
${_utils.colors.bold('f')} ${_utils.colors.dim('run failed tests')}
${_utils.colors.bold('r')} ${_utils.colors.dim('repeat last run')}
${_utils.colors.bold('q')} ${_utils.colors.dim('quit')}

Change settings
${_utils.colors.bold('c')} ${_utils.colors.dim('set project')}
${_utils.colors.bold('p')} ${_utils.colors.dim('set file filter')}
${_utils.colors.bold('t')} ${_utils.colors.dim('set title filter')}
${_utils.colors.bold('s')} ${_utils.colors.dim('toggle show & reuse the browser')}
${_utils.colors.bold('b')} ${_utils.colors.dim('toggle buffer mode')}
`);
      return;
    }
    switch (name) {
      case 'return':
        return 'run';
      case 'r':
        return 'repeat';
      case 'c':
        return 'project';
      case 'p':
        return 'file';
      case 't':
        return 'grep';
      case 'f':
        return 'failed';
      case 's':
        return 'toggle-show-browser';
      case 'b':
        return 'toggle-buffer-mode';
    }
  });
}
let showBrowserServer;
let connectWsEndpoint = undefined;
let seq = 1;
function printConfiguration(options, title) {
  const packageManagerCommand = (0, _utils.getPackageManagerExecCommand)();
  const tokens = [];
  tokens.push(`${packageManagerCommand} playwright test`);
  if (options.projects) tokens.push(...options.projects.map(p => _utils.colors.blue(`--project ${p}`)));
  if (options.grep) tokens.push(_utils.colors.red(`--grep ${options.grep}`));
  if (options.files) tokens.push(...options.files.map(a => _utils.colors.bold(a)));
  if (title) tokens.push(_utils.colors.dim(`(${title})`));
  tokens.push(_utils.colors.dim(`#${seq++}`));
  const lines = [];
  const sep = (0, _base.separator)(_base.terminalScreen);
  lines.push('\x1Bc' + sep);
  lines.push(`${tokens.join(' ')}`);
  lines.push(`${_utils.colors.dim('Show & reuse browser:')} ${_utils.colors.bold(showBrowserServer ? 'on' : 'off')}`);
  process.stdout.write(lines.join('\n'));
}
function printBufferPrompt(dirtyTestFiles, rootDir) {
  const sep = (0, _base.separator)(_base.terminalScreen);
  process.stdout.write('\x1Bc');
  process.stdout.write(`${sep}\n`);
  if (dirtyTestFiles.size === 0) {
    process.stdout.write(`${_utils.colors.dim('Waiting for file changes. Press')} ${_utils.colors.bold('q')} ${_utils.colors.dim('to quit or')} ${_utils.colors.bold('h')} ${_utils.colors.dim('for more options.')}\n\n`);
    return;
  }
  process.stdout.write(`${_utils.colors.dim(`${dirtyTestFiles.size} test ${dirtyTestFiles.size === 1 ? 'file' : 'files'} changed:`)}\n\n`);
  for (const file of dirtyTestFiles) process.stdout.write(` · ${_path.default.relative(rootDir, file)}\n`);
  process.stdout.write(`\n${_utils.colors.dim(`Press`)} ${_utils.colors.bold('enter')} ${_utils.colors.dim('to run')}, ${_utils.colors.bold('q')} ${_utils.colors.dim('to quit or')} ${_utils.colors.bold('h')} ${_utils.colors.dim('for more options.')}\n\n`);
}
function printPrompt() {
  const sep = (0, _base.separator)(_base.terminalScreen);
  process.stdout.write(`
${sep}
${_utils.colors.dim('Waiting for file changes. Press')} ${_utils.colors.bold('enter')} ${_utils.colors.dim('to run tests')}, ${_utils.colors.bold('q')} ${_utils.colors.dim('to quit or')} ${_utils.colors.bold('h')} ${_utils.colors.dim('for more options.')}
`);
}
async function toggleShowBrowser() {
  if (!showBrowserServer) {
    showBrowserServer = new _playwrightServer.PlaywrightServer({
      mode: 'extension',
      path: '/' + (0, _utils.createGuid)(),
      maxConnections: 1
    });
    connectWsEndpoint = await showBrowserServer.listen();
    process.stdout.write(`${_utils.colors.dim('Show & reuse browser:')} ${_utils.colors.bold('on')}\n`);
  } else {
    var _showBrowserServer;
    await ((_showBrowserServer = showBrowserServer) === null || _showBrowserServer === void 0 ? void 0 : _showBrowserServer.close());
    showBrowserServer = undefined;
    connectWsEndpoint = undefined;
    process.stdout.write(`${_utils.colors.dim('Show & reuse browser:')} ${_utils.colors.bold('off')}\n`);
  }
}
85
node_modules/playwright/lib/runner/workerHost.js
generated
vendored
Normal file
@@ -0,0 +1,85 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.WorkerHost = void 0;
|
||||
var _fs = _interopRequireDefault(require("fs"));
|
||||
var _path = _interopRequireDefault(require("path"));
|
||||
var _utils = require("playwright-core/lib/utils");
|
||||
var _processHost = require("./processHost");
|
||||
var _ipc = require("../common/ipc");
|
||||
var _folders = require("../isomorphic/folders");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
/**
|
||||
* Copyright Microsoft Corporation. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
let lastWorkerIndex = 0;
|
||||
class WorkerHost extends _processHost.ProcessHost {
|
||||
constructor(testGroup, parallelIndex, config, extraEnv, outputDir) {
|
||||
const workerIndex = lastWorkerIndex++;
|
||||
super(require.resolve('../worker/workerMain.js'), `worker-${workerIndex}`, {
|
||||
...extraEnv,
|
||||
FORCE_COLOR: '1',
|
||||
DEBUG_COLORS: process.env.DEBUG_COLORS === undefined ? '1' : process.env.DEBUG_COLORS
|
||||
});
|
||||
this.parallelIndex = void 0;
|
||||
this.workerIndex = void 0;
|
||||
this._hash = void 0;
|
||||
this._params = void 0;
|
||||
this._didFail = false;
|
||||
this.workerIndex = workerIndex;
|
||||
this.parallelIndex = parallelIndex;
|
||||
this._hash = testGroup.workerHash;
|
||||
this._params = {
|
||||
workerIndex: this.workerIndex,
|
||||
parallelIndex,
|
||||
repeatEachIndex: testGroup.repeatEachIndex,
|
||||
projectId: testGroup.projectId,
|
||||
config,
|
||||
artifactsDir: _path.default.join(outputDir, (0, _folders.artifactsFolderName)(workerIndex))
|
||||
};
|
||||
}
|
||||
async start() {
|
||||
await _fs.default.promises.mkdir(this._params.artifactsDir, {
|
||||
recursive: true
|
||||
});
|
||||
return await this.startRunner(this._params, {
|
||||
onStdOut: chunk => this.emit('stdOut', (0, _ipc.stdioChunkToParams)(chunk)),
|
||||
onStdErr: chunk => this.emit('stdErr', (0, _ipc.stdioChunkToParams)(chunk))
|
||||
});
|
||||
}
|
||||
async onExit() {
|
||||
await (0, _utils.removeFolders)([this._params.artifactsDir]);
|
||||
}
|
||||
async stop(didFail) {
|
||||
if (didFail) this._didFail = true;
|
||||
await super.stop();
|
||||
}
|
||||
runTestGroup(runPayload) {
|
||||
this.sendMessageNoReply({
|
||||
method: 'runTestGroup',
|
||||
params: runPayload
|
||||
});
|
||||
}
|
||||
hash() {
|
||||
return this._hash;
|
||||
}
|
||||
didFail() {
|
||||
return this._didFail;
|
||||
}
|
||||
}
|
||||
exports.WorkerHost = WorkerHost;
|
||||