diff --git a/.github/workflows/webdriver.yml b/.github/workflows/webdriver.yml
index 98cef5bbd..74e1a9882 100644
--- a/.github/workflows/webdriver.yml
+++ b/.github/workflows/webdriver.yml
@@ -38,7 +38,7 @@ jobs:
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
- name: start a server
run: 'php -S 127.0.0.1:8000 -t test/data/app &'
- - name: Check CodeceptJS can be started
+ - name: check
run: './bin/codecept.js check -c test/acceptance/codecept.WebDriver.js'
- name: run unit tests
run: ./node_modules/.bin/mocha test/helper/WebDriver_test.js --exit
diff --git a/docs/plugins.md b/docs/plugins.md
index d18f534c4..9c88deeab 100644
--- a/docs/plugins.md
+++ b/docs/plugins.md
@@ -7,6 +7,12 @@ title: Plugins
+## analyze
+
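+A minimal way to enable this plugin, following the same config pattern as the other plugins on this page (all options left at their defaults, as in the example config updated in this change):
+
+```js
+plugins: {
+  analyze: {
+    enabled: true,
+  },
+}
+```
+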
+### Parameters
+
+- `config` **any** (optional, default `{}`)
+
## autoDelay
Sometimes it takes some time for a page to respond to user's actions.
@@ -521,29 +527,13 @@ I.click('=sign-up') // matches => [data-qa=sign-up],[data-test=sign-up]
- `config`
-## debugErrors
-
-Prints errors found in HTML code after each failed test.
+## customReporter
-It scans HTML and searches for elements with error classes.
-If an element found prints a text from it to console and adds as artifact to the test.
-
-Enable this plugin in config:
-
-```js
-plugins: {
- debugErrors: {
- enabled: true,
-}
-```
-
-Additional config options:
-
-- `errorClasses` - list of classes to search for errors (default: `['error', 'warning', 'alert', 'danger']`)
+Sample custom reporter for CodeceptJS.
### Parameters
-- `config` (optional, default `{}`)
+- `config`
## eachElement
@@ -672,6 +662,32 @@ More config options are available:
- `config` (optional, default `{}`)
+## pageInfo
+
+Collects information from the web page after each failed test and attaches it to the test as an artifact.
+It is recommended to enable this plugin if you run tests on CI and need to debug failing tests.
+It can be paired with the `analyze` plugin to provide more context.
+
+It collects the page URL, HTML errors (matched by element classes), and browser logs.
+
+Enable this plugin in config:
+
+```js
+plugins: {
+ pageInfo: {
+ enabled: true,
+  },
+}
+```
+
+Additional config options:
+
+- `errorClasses` - list of classes to search for errors (default: `['error', 'warning', 'alert', 'danger']`)
+- `browserLogs` - list of error types to search for in browser logs (default: `['error']`)
+
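+Both options can be set together; a minimal sketch with illustrative values (not recommendations):
+
+```js
+plugins: {
+  pageInfo: {
+    enabled: true,
+    errorClasses: ['error', 'alert'],
+    browserLogs: ['error', 'warning'],
+  },
+}
+```
+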
+### Parameters
+
+- `config` (optional, default `{}`)
+
## pauseOnFail
Automatically launches [interactive pause][11] when a test fails.
diff --git a/examples/codecept.config.js b/examples/codecept.config.js
index 2f4b03d64..ceac01db0 100644
--- a/examples/codecept.config.js
+++ b/examples/codecept.config.js
@@ -1,4 +1,4 @@
-require('./heal_recipes');
+require('./heal_recipes')
exports.config = {
output: './output',
@@ -34,22 +34,21 @@ exports.config = {
},
gherkin: {
features: './features/*.feature',
- steps: [
- './step_definitions/steps.js',
- ],
+ steps: ['./step_definitions/steps.js'],
},
plugins: {
- tryTo: {
- enabled: true,
- },
- heal: {
+ analyze: {
enabled: true,
},
+ // heal: {
+ // enabled: true,
+ // },
+ // customReporter: {
+ // enabled: true,
+ // },
wdio: {
enabled: false,
- services: [
- 'selenium-standalone',
- ],
+ services: ['selenium-standalone'],
},
stepByStepReport: {},
autoDelay: {
@@ -61,10 +60,8 @@ exports.config = {
subtitles: {
enabled: true,
},
- retryTo: {
- enabled: true,
- },
},
+
tests: './*_test.js',
// timeout: 100,
multiple: {
@@ -73,11 +70,8 @@ exports.config = {
},
default: {
grep: 'signin',
- browsers: [
- 'chrome',
- 'firefox',
- ],
+ browsers: ['chrome', 'firefox'],
},
},
name: 'tests',
-};
+}
diff --git a/examples/github_test.js b/examples/github_test.js
index e8f274c21..a2c66fa18 100644
--- a/examples/github_test.js
+++ b/examples/github_test.js
@@ -1,36 +1,36 @@
// /
-Feature('GitHub');
+Feature('GitHub')
Before(({ I }) => {
- I.amOnPage('https://github.com');
-});
+ I.amOnPage('https://github.com')
+ I.see('GitLab')
+})
xScenario('test ai features', ({ I }) => {
- I.amOnPage('https://getbootstrap.com/docs/5.1/examples/checkout/');
- pause();
-});
+ I.amOnPage('https://getbootstrap.com/docs/5.1/examples/checkout/')
+})
Scenario('Incorrect search for Codeceptjs', ({ I }) => {
- I.fillField('.search-input', 'CodeceptJS');
- I.pressKey('Enter');
- I.waitForElement('[data-testid=search-sub-header]', 10);
- I.see('Supercharged End 2 End Testing');
-});
+ I.fillField('.search-input', 'CodeceptJS')
+ I.pressKey('Enter')
+ I.waitForElement('[data-testid=search-sub-header]', 10)
+ I.see('Supercharged End 2 End Testing')
+})
Scenario('Visit Home Page @retry', async ({ I }) => {
// .retry({ retries: 3, minTimeout: 1000 })
- I.retry(2).see('GitHub');
- I.retry(3).see('ALL');
- I.retry(2).see('IMAGES');
-});
+ I.retry(2).see('GitHub')
+ I.retry(3).see('ALL')
+ I.retry(2).see('IMAGES')
+})
Scenario('search @grop', { timeout: 6 }, ({ I }) => {
- I.amOnPage('https://github.com/search');
+ I.amOnPage('https://github.com/search')
const a = {
b: {
c: 'asdasdasd',
},
- };
+ }
const b = {
users: {
admin: {
@@ -42,35 +42,38 @@ Scenario('search @grop', { timeout: 6 }, ({ I }) => {
other: (world = '') => `Hello ${world}`,
},
urls: {},
- };
- I.fillField('Search GitHub', 'CodeceptJS');
+ }
+ I.fillField('Search GitHub', 'CodeceptJS')
// pause({ a, b });
- I.pressKey('Enter');
- I.wait(3);
+ I.pressKey('Enter')
+ I.wait(3)
// pause();
- I.see('Codeception/CodeceptJS', locate('.repo-list .repo-list-item').first());
-});
+ I.see('Codeception/CodeceptJS', locate('.repo-list .repo-list-item').first())
+})
Scenario('signin @sign', { timeout: 6 }, ({ I, loginPage }) => {
- I.say('it should not enter');
- loginPage.login('something@totest.com', '123456');
- I.see('Incorrect username or password.', '.flash-error');
-}).tag('normal').tag('important').tag('@slow');
+ I.say('it should not enter')
+ loginPage.login('something@totest.com', '123456')
+ I.see('Incorrect username or password.', '.flash-error')
+})
+ .tag('normal')
+ .tag('important')
+ .tag('@slow')
Scenario('signin2', { timeout: 1 }, ({ I, Smth }) => {
- Smth.openAndLogin();
- I.see('Incorrect username or password.', '.flash-error');
-});
+ Smth.openAndLogin()
+ I.see('Incorrect username or password.', '.flash-error')
+})
Scenario('register', ({ I }) => {
within('.js-signup-form', () => {
- I.fillField('user[login]', 'User');
- I.fillField('user[email]', 'user@user.com');
- I.fillField('user[password]', 'user@user.com');
- I.fillField('q', 'aaa');
- I.click('button');
- });
- I.see('There were problems creating your account.');
- I.click('Explore');
- I.seeInCurrentUrl('/explore');
-});
+ I.fillField('user[login]', 'User')
+ I.fillField('user[email]', 'user@user.com')
+ I.fillField('user[password]', 'user@user.com')
+ I.fillField('q', 'aaa')
+ I.click('button')
+ })
+ I.see('There were problems creating your account.')
+ I.click('Explore')
+ I.seeInCurrentUrl('/explore')
+})
diff --git a/examples/selenoid-example/browsers.json b/examples/selenoid-example/browsers.json
deleted file mode 100644
index d715f44cc..000000000
--- a/examples/selenoid-example/browsers.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
- "chrome": {
- "default": "latest",
- "versions": {
- "latest": {
- "image": "selenoid/chrome:latest",
- "port": "4444",
- "path": "/"
- }
- }
- },
- "firefox": {
- "default": "latest",
- "versions": {
- "latest": {
- "image": "selenoid/firefox:latest",
- "port": "4444",
- "path": "/wd/hub"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/examples/selenoid-example/codecept.conf.js b/examples/selenoid-example/codecept.conf.js
deleted file mode 100644
index 59666c7e1..000000000
--- a/examples/selenoid-example/codecept.conf.js
+++ /dev/null
@@ -1,29 +0,0 @@
-exports.config = {
- tests: './*_test.js',
- output: './output',
- helpers: {
- WebDriver: {
- url: 'http://localhost',
- browser: 'chrome',
- },
- },
-
- plugins: {
- selenoid: {
- enabled: true,
- deletePassed: true,
- autoCreate: true,
- autoStart: true,
- sessionTimeout: '30m',
- enableVideo: true,
- enableLog: true,
- },
- allure: {
- enabled: false,
- },
- },
- include: {},
- bootstrap: null,
- mocha: {},
- name: 'example',
-};
diff --git a/examples/selenoid-example/git_test.js b/examples/selenoid-example/git_test.js
deleted file mode 100644
index 1727c1bdc..000000000
--- a/examples/selenoid-example/git_test.js
+++ /dev/null
@@ -1,16 +0,0 @@
-Feature('Git');
-
-Scenario('Demo Test Github', ({ I }) => {
- I.amOnPage('https://github.com/login');
- I.see('GitHub');
- I.fillField('login', 'randomuser_kmk');
- I.fillField('password', 'randomuser_kmk');
- I.click('Sign in');
- I.see('Repositories');
-});
-
-Scenario('Demo Test GitLab', ({ I }) => {
- I.amOnPage('https://gitlab.com');
- I.dontSee('GitHub');
- I.see('GitLab');
-});
diff --git a/lib/actor.js b/lib/actor.js
index ff3a54050..081dae75e 100644
--- a/lib/actor.js
+++ b/lib/actor.js
@@ -3,8 +3,7 @@ const MetaStep = require('./step/meta')
const recordStep = require('./step/record')
const container = require('./container')
const { methodsOfObject } = require('./utils')
-const { TIMEOUT_ORDER } = require('./step/timeout')
-const recorder = require('./recorder')
+const { TIMEOUT_ORDER } = require('./timeout')
const event = require('./event')
const store = require('./store')
const output = require('./output')
diff --git a/lib/ai.js b/lib/ai.js
index 86dffcd3b..2104dafec 100644
--- a/lib/ai.js
+++ b/lib/ai.js
@@ -1,40 +1,44 @@
-const debug = require('debug')('codeceptjs:ai');
-const output = require('./output');
-const event = require('./event');
-const { removeNonInteractiveElements, minifyHtml, splitByChunks } = require('./html');
+const debug = require('debug')('codeceptjs:ai')
+const output = require('./output')
+const event = require('./event')
+const { removeNonInteractiveElements, minifyHtml, splitByChunks } = require('./html')
const defaultHtmlConfig = {
maxLength: 50000,
simplify: true,
minify: true,
html: {},
-};
+}
const defaultPrompts = {
- writeStep: (html, input) => [{
- role: 'user',
- content: `I am test engineer writing test in CodeceptJS
+ writeStep: (html, input) => [
+ {
+ role: 'user',
+ content: `I am test engineer writing test in CodeceptJS
I have opened web page and I want to use CodeceptJS to ${input} on this page
Provide me valid CodeceptJS code to accomplish it
Use only locators from this HTML: \n\n${html}`,
- },
+ },
],
healStep: (html, { step, error, prevSteps }) => {
- return [{
- role: 'user',
- content: `As a test automation engineer I am testing web application using CodeceptJS.
+ return [
+ {
+ role: 'user',
+ content: `As a test automation engineer I am testing web application using CodeceptJS.
I want to heal a test that fails. Here is the list of executed steps: ${prevSteps.map(s => s.toString()).join(', ')}
Propose how to adjust ${step.toCode()} step to fix the test.
Use locators in order of preference: semantic locator by text, CSS, XPath. Use codeblocks marked with \`\`\`
Here is the error message: ${error.message}
Here is HTML code of a page where the failure has happened: \n\n${html}`,
- }];
+ },
+ ]
},
- generatePageObject: (html, extraPrompt = '', rootLocator = null) => [{
- role: 'user',
- content: `As a test automation engineer I am creating a Page Object for a web application using CodeceptJS.
+ generatePageObject: (html, extraPrompt = '', rootLocator = null) => [
+ {
+ role: 'user',
+ content: `As a test automation engineer I am creating a Page Object for a web application using CodeceptJS.
Here is an sample page object:
const { I } = inject();
@@ -60,72 +64,73 @@ module.exports = {
${extraPrompt}
${rootLocator ? `All provided elements are inside '${rootLocator}'. Declare it as root variable and for every locator use locate(...).inside(root)` : ''}
Add only locators from this HTML: \n\n${html}`,
- }],
-};
+ },
+ ],
+}
class AiAssistant {
constructor() {
- this.totalTime = 0;
- this.numTokens = 0;
+ this.totalTime = 0
+ this.numTokens = 0
- this.reset();
- this.connectToEvents();
+ this.reset()
+ this.connectToEvents()
}
enable(config = {}) {
- debug('Enabling AI assistant');
- this.isEnabled = true;
+ debug('Enabling AI assistant')
+ this.isEnabled = true
- const { html, prompts, ...aiConfig } = config;
+ const { html, prompts, ...aiConfig } = config
- this.config = Object.assign(this.config, aiConfig);
- this.htmlConfig = Object.assign(defaultHtmlConfig, html);
- this.prompts = Object.assign(defaultPrompts, prompts);
+ this.config = Object.assign(this.config, aiConfig)
+ this.htmlConfig = Object.assign(defaultHtmlConfig, html)
+ this.prompts = Object.assign(defaultPrompts, prompts)
- debug('Config', this.config);
+ debug('Config', this.config)
}
reset() {
- this.numTokens = 0;
- this.isEnabled = false;
+ this.numTokens = 0
+ this.isEnabled = false
this.config = {
maxTokens: 1000000,
request: null,
response: parseCodeBlocks,
// lets limit token usage to 1M
- };
- this.minifiedHtml = null;
- this.response = null;
- this.totalTime = 0;
+ }
+ this.minifiedHtml = null
+ this.response = null
+ this.totalTime = 0
}
disable() {
- this.isEnabled = false;
+ this.isEnabled = false
}
connectToEvents() {
event.dispatcher.on(event.all.result, () => {
if (this.isEnabled && this.numTokens > 0) {
- const numTokensK = Math.ceil(this.numTokens / 1000);
- const maxTokensK = Math.ceil(this.config.maxTokens / 1000);
- output.print(`AI assistant took ${this.totalTime}s and used ~${numTokensK}K input tokens. Tokens limit: ${maxTokensK}K`);
+ const numTokensK = Math.ceil(this.numTokens / 1000)
+ const maxTokensK = Math.ceil(this.config.maxTokens / 1000)
+ output.print(`AI assistant took ${this.totalTime}s and used ~${numTokensK}K input tokens. Tokens limit: ${maxTokensK}K`)
}
- });
+ })
}
checkRequestFn() {
if (!this.isEnabled) {
- debug('AI assistant is disabled');
- return;
+ debug('AI assistant is disabled')
+ return
}
- if (this.config.request) return;
+ if (this.config.request) return
const noRequestErrorMessage = `
- No request function is set for AI assistant.
- Please implement your own request function and set it in the config.
+ No request function is set for AI assistant.
- [!] AI request was decoupled from CodeceptJS. To connect to OpenAI or other AI service, please implement your own request function and set it in the config.
+  [!] AI request was decoupled from CodeceptJS. To connect to OpenAI or another AI service,
+  please implement your own request function and set it in the config.
Example (connect to OpenAI):
@@ -134,82 +139,80 @@ class AiAssistant {
const OpenAI = require('openai');
const openai = new OpenAI({ apiKey: process.env['OPENAI_API_KEY'] })
const response = await openai.chat.completions.create({
- model: 'gpt-3.5-turbo-0125',
+ model: 'gpt-4o-mini',
messages,
});
return response?.data?.choices[0]?.message?.content;
}
}
- `.trim();
+ `.trim()
- throw new Error(noRequestErrorMessage);
+ throw new Error(noRequestErrorMessage)
}
async setHtmlContext(html) {
- let processedHTML = html;
+ let processedHTML = html
if (this.htmlConfig.simplify) {
- processedHTML = removeNonInteractiveElements(processedHTML, this.htmlConfig);
+ processedHTML = removeNonInteractiveElements(processedHTML, this.htmlConfig)
}
- if (this.htmlConfig.minify) processedHTML = await minifyHtml(processedHTML);
- if (this.htmlConfig.maxLength) processedHTML = splitByChunks(processedHTML, this.htmlConfig.maxLength)[0];
+ if (this.htmlConfig.minify) processedHTML = await minifyHtml(processedHTML)
+ if (this.htmlConfig.maxLength) processedHTML = splitByChunks(processedHTML, this.htmlConfig.maxLength)[0]
- this.minifiedHtml = processedHTML;
+ this.minifiedHtml = processedHTML
}
getResponse() {
- return this.response || '';
+ return this.response || ''
}
async createCompletion(messages) {
- if (!this.isEnabled) return '';
-
- debug('Request', messages);
-
- this.checkRequestFn();
-
- this.response = null;
-
- this.calculateTokens(messages);
+ if (!this.isEnabled) return ''
try {
- const startTime = process.hrtime();
- this.response = await this.config.request(messages);
- const endTime = process.hrtime(startTime);
- const executionTimeInSeconds = endTime[0] + endTime[1] / 1e9;
-
- this.totalTime += Math.round(executionTimeInSeconds);
- debug('AI response time', executionTimeInSeconds);
- debug('Response', this.response);
- this.stopWhenReachingTokensLimit();
- return this.response;
+ this.checkRequestFn()
+ debug('Request', messages)
+
+ this.response = null
+
+ this.calculateTokens(messages)
+ const startTime = process.hrtime()
+ this.response = await this.config.request(messages)
+ const endTime = process.hrtime(startTime)
+ const executionTimeInSeconds = endTime[0] + endTime[1] / 1e9
+
+ this.totalTime += Math.round(executionTimeInSeconds)
+ debug('AI response time', executionTimeInSeconds)
+ debug('Response', this.response)
+ this.stopWhenReachingTokensLimit()
+ return this.response
} catch (err) {
- debug(err.response);
- output.print('');
- output.error(`AI service error: ${err.message}`);
- if (err?.response?.data?.error?.code) output.error(err?.response?.data?.error?.code);
- if (err?.response?.data?.error?.message) output.error(err?.response?.data?.error?.message);
- this.stopWhenReachingTokensLimit();
- return '';
+ debug(err.response)
+ output.print('')
+ output.error(`AI service error: ${err.message}`)
+ if (err?.response?.data?.error?.code) output.error(err?.response?.data?.error?.code)
+ if (err?.response?.data?.error?.message) output.error(err?.response?.data?.error?.message)
+ this.stopWhenReachingTokensLimit()
+ return ''
}
}
async healFailedStep(failureContext) {
- if (!this.isEnabled) return [];
- if (!failureContext.html) throw new Error('No HTML context provided');
+ if (!this.isEnabled) return []
+ if (!failureContext.html) throw new Error('No HTML context provided')
- await this.setHtmlContext(failureContext.html);
+ await this.setHtmlContext(failureContext.html)
if (!this.minifiedHtml) {
- debug('HTML context is empty after removing non-interactive elements & minification');
- return [];
+ debug('HTML context is empty after removing non-interactive elements & minification')
+ return []
}
- const response = await this.createCompletion(this.prompts.healStep(this.minifiedHtml, failureContext));
- if (!response) return [];
+ const response = await this.createCompletion(this.prompts.healStep(this.minifiedHtml, failureContext))
+ if (!response) return []
- return this.config.response(response);
+ return this.config.response(response)
}
/**
@@ -219,13 +222,13 @@ class AiAssistant {
* @returns
*/
async generatePageObject(extraPrompt = null, locator = null) {
- if (!this.isEnabled) return [];
- if (!this.minifiedHtml) throw new Error('No HTML context provided');
+ if (!this.isEnabled) return []
+ if (!this.minifiedHtml) throw new Error('No HTML context provided')
- const response = await this.createCompletion(this.prompts.generatePageObject(this.minifiedHtml, locator, extraPrompt));
- if (!response) return [];
+ const response = await this.createCompletion(this.prompts.generatePageObject(this.minifiedHtml, locator, extraPrompt))
+ if (!response) return []
- return this.config.response(response);
+ return this.config.response(response)
}
calculateTokens(messages) {
@@ -233,66 +236,72 @@ class AiAssistant {
// this approach was tested via https://platform.openai.com/tokenizer
// we need it to display current tokens usage so users could analyze effectiveness of AI
- const inputString = messages.map(m => m.content).join(' ').trim();
- const numWords = (inputString.match(/[^\s\-:=]+/g) || []).length;
+ const inputString = messages
+ .map(m => m.content)
+ .join(' ')
+ .trim()
+ const numWords = (inputString.match(/[^\s\-:=]+/g) || []).length
// 2.5 token is constant for average HTML input
- const tokens = numWords * 2.5;
+ const tokens = numWords * 2.5
- this.numTokens += tokens;
+ this.numTokens += tokens
- return tokens;
+ return tokens
}
stopWhenReachingTokensLimit() {
- if (this.numTokens < this.config.maxTokens) return;
+ if (this.numTokens < this.config.maxTokens) return
- output.print(`AI assistant has reached the limit of ${this.config.maxTokens} tokens in this session. It will be disabled now`);
- this.disable();
+ output.print(`AI assistant has reached the limit of ${this.config.maxTokens} tokens in this session. It will be disabled now`)
+ this.disable()
}
async writeSteps(input) {
- if (!this.isEnabled) return;
- if (!this.minifiedHtml) throw new Error('No HTML context provided');
+ if (!this.isEnabled) return
+ if (!this.minifiedHtml) throw new Error('No HTML context provided')
- const snippets = [];
+ const snippets = []
- const response = await this.createCompletion(this.prompts.writeStep(this.minifiedHtml, input));
- if (!response) return;
- snippets.push(...this.config.response(response));
+ const response = await this.createCompletion(this.prompts.writeStep(this.minifiedHtml, input))
+ if (!response) return
+ snippets.push(...this.config.response(response))
- debug(snippets[0]);
+ debug(snippets[0])
- return snippets[0];
+ return snippets[0]
}
}
function parseCodeBlocks(response) {
// Regular expression pattern to match code snippets
- const codeSnippetPattern = /```(?:javascript|js|typescript|ts)?\n([\s\S]+?)\n```/g;
+ const codeSnippetPattern = /```(?:javascript|js|typescript|ts)?\n([\s\S]+?)\n```/g
// Array to store extracted code snippets
- const codeSnippets = [];
+ const codeSnippets = []
- response = response.split('\n').map(line => line.trim()).join('\n');
+ response = response
+ .split('\n')
+ .map(line => line.trim())
+ .join('\n')
// Iterate over matches and extract code snippets
- let match;
+ let match
while ((match = codeSnippetPattern.exec(response)) !== null) {
- codeSnippets.push(match[1]);
+ codeSnippets.push(match[1])
}
// Remove "Scenario", "Feature", and "require()" lines
const modifiedSnippets = codeSnippets.map(snippet => {
- const lines = snippet.split('\n');
+ const lines = snippet.split('\n')
- const filteredLines = lines.filter(line => !line.includes('I.amOnPage') && !line.startsWith('Scenario') && !line.startsWith('Feature') && !line.includes('= require('));
+ const filteredLines = lines.filter(line => !line.includes('I.amOnPage') && !line.startsWith('Scenario') && !line.startsWith('Feature') && !line.includes('= require('))
- return filteredLines.join('\n');
+ return filteredLines.join('\n')
// remove snippets that move from current url
- }); // .filter(snippet => !line.includes('I.amOnPage'));
+ }) // .filter(snippet => !line.includes('I.amOnPage'));
- return modifiedSnippets.filter(snippet => !!snippet);
+ return modifiedSnippets.filter(snippet => !!snippet)
}
-module.exports = new AiAssistant();
+module.exports = new AiAssistant()
diff --git a/lib/codecept.js b/lib/codecept.js
index f0626cbcc..f1a76ca00 100644
--- a/lib/codecept.js
+++ b/lib/codecept.js
@@ -105,8 +105,8 @@ class Codecept {
// default hooks
runHook(require('./listener/store'))
runHook(require('./listener/steps'))
- runHook(require('./listener/artifacts'))
runHook(require('./listener/config'))
+ runHook(require('./listener/result'))
runHook(require('./listener/helpers'))
runHook(require('./listener/globalTimeout'))
runHook(require('./listener/globalRetry'))
@@ -199,13 +199,13 @@ class Codecept {
mocha.files = mocha.files.filter(t => fsPath.basename(t, '.js') === test || t === test)
}
const done = () => {
- event.emit(event.all.result, this)
- event.emit(event.all.after, this)
+ event.emit(event.all.result, container.result())
+ event.emit(event.all.after)
resolve()
}
try {
- event.emit(event.all.before, this)
+ event.emit(event.all.before)
mocha.run(() => done())
} catch (e) {
output.error(e.stack)
diff --git a/lib/command/check.js b/lib/command/check.js
index bfa45cc67..4c32ba359 100644
--- a/lib/command/check.js
+++ b/lib/command/check.js
@@ -22,6 +22,7 @@ module.exports = async function (options) {
config: false,
container: false,
pageObjects: false,
+ plugins: false,
helpers: false,
setup: false,
tests: false,
@@ -115,6 +116,9 @@ module.exports = async function (options) {
}
printCheck('page objects', checks['pageObjects'], `Total: ${Object.keys(pageObjects).length} support objects`)
+  checks.plugins = true // plugins are not verified yet, only listed; mark the check as passed
+ printCheck('plugins', checks['plugins'], Object.keys(container.plugins()).join(', '))
+
if (Object.keys(helpers).length) {
const suite = container.mocha().suite
const test = createTest('test', () => {})
diff --git a/lib/command/run-workers.js b/lib/command/run-workers.js
index 31a2c598e..20a26e2c8 100644
--- a/lib/command/run-workers.js
+++ b/lib/command/run-workers.js
@@ -8,12 +8,6 @@ const Workers = require('../workers')
module.exports = async function (workerCount, selectedRuns, options) {
process.env.profile = options.profile
- const suiteArr = []
- const passedTestArr = []
- const failedTestArr = []
- const skippedTestArr = []
- const stepArr = []
-
const { config: testConfig, override = '' } = options
const overrideConfigs = tryOrDefault(() => JSON.parse(override), {})
const by = options.suites ? 'suite' : 'test'
@@ -35,65 +29,19 @@ module.exports = async function (workerCount, selectedRuns, options) {
const workers = new Workers(numberOfWorkers, config)
workers.overrideConfig(overrideConfigs)
- workers.on(event.suite.before, suite => {
- suiteArr.push(suite)
- })
-
- workers.on(event.step.passed, step => {
- stepArr.push(step)
- })
-
- workers.on(event.step.failed, step => {
- stepArr.push(step)
- })
-
workers.on(event.test.failed, test => {
- failedTestArr.push(test)
output.test.failed(test)
})
workers.on(event.test.passed, test => {
- passedTestArr.push(test)
output.test.passed(test)
})
workers.on(event.test.skipped, test => {
- skippedTestArr.push(test)
output.test.skipped(test)
})
- workers.on(event.all.result, () => {
- // expose test stats after all workers finished their execution
- function addStepsToTest(test, stepArr) {
- stepArr.test.steps.forEach(step => {
- if (test.steps.length === 0) {
- test.steps.push(step)
- }
- })
- }
-
- stepArr.forEach(step => {
- passedTestArr.forEach(test => {
- if (step.test.title === test.title) {
- addStepsToTest(test, step)
- }
- })
-
- failedTestArr.forEach(test => {
- if (step.test.title === test.title) {
- addStepsToTest(test, step)
- }
- })
- })
-
- event.dispatcher.emit(event.workers.result, {
- suites: suiteArr,
- tests: {
- passed: passedTestArr,
- failed: failedTestArr,
- skipped: skippedTestArr,
- },
- })
+ workers.on(event.all.result, result => {
workers.printResults()
})
diff --git a/lib/command/workers/runTests.js b/lib/command/workers/runTests.js
index f32725d35..d6222575a 100644
--- a/lib/command/workers/runTests.js
+++ b/lib/command/workers/runTests.js
@@ -75,217 +75,53 @@ function filterTests() {
}
function initializeListeners() {
- function simplifyError(error) {
- if (error) {
- const { stack, uncaught, message, actual, expected } = error
-
- return {
- stack,
- uncaught,
- message,
- actual,
- expected,
- }
- }
-
- return null
- }
- function simplifyTest(test, err = null) {
- test = { ...test }
-
- if (test.start && !test.duration) {
- const end = new Date()
- test.duration = end - test.start
- }
-
- if (test.err) {
- err = simplifyError(test.err)
- test.status = 'failed'
- } else if (err) {
- err = simplifyError(err)
- test.status = 'failed'
- }
- const parent = {}
- if (test.parent) {
- parent.title = test.parent.title
- }
-
- if (test.opts) {
- Object.keys(test.opts).forEach(k => {
- if (typeof test.opts[k] === 'object') delete test.opts[k]
- if (typeof test.opts[k] === 'function') delete test.opts[k]
- })
- }
-
- return {
- opts: test.opts || {},
- tags: test.tags || [],
- uid: test.uid,
- workerIndex,
- retries: test._retries,
- title: test.title,
- status: test.status,
- notes: test.notes || [],
- meta: test.meta || {},
- artifacts: test.artifacts || [],
- duration: test.duration || 0,
- err,
- parent,
- steps: test.steps && test.steps.length > 0 ? simplifyStepsInTestObject(test.steps, err) : [],
- }
- }
-
- function simplifyStepsInTestObject(steps, err) {
- steps = [...steps]
- const _steps = []
-
- for (step of steps) {
- const _args = []
-
- if (step.args) {
- for (const arg of step.args) {
- // check if arg is a JOI object
- if (arg && arg.$_root) {
- _args.push(JSON.stringify(arg).slice(0, 300))
- // check if arg is a function
- } else if (arg && typeof arg === 'function') {
- _args.push(arg.name)
- } else {
- _args.push(arg)
- }
- }
- }
-
- _steps.push({
- actor: step.actor,
- name: step.name,
- status: step.status,
- args: JSON.stringify(_args),
- startedAt: step.startedAt,
- startTime: step.startTime,
- endTime: step.endTime,
- finishedAt: step.finishedAt,
- duration: step.duration,
- err,
- })
- }
-
- return _steps
- }
-
- function simplifyStep(step, err = null) {
- step = { ...step }
-
- if (step.startTime && !step.duration) {
- const end = new Date()
- step.duration = end - step.startTime
- }
-
- if (step.err) {
- err = simplifyError(step.err)
- step.status = 'failed'
- } else if (err) {
- err = simplifyError(err)
- step.status = 'failed'
- }
-
- const parent = {}
- if (step.metaStep) {
- parent.title = step.metaStep.actor
- }
-
- if (step.opts) {
- Object.keys(step.opts).forEach(k => {
- if (typeof step.opts[k] === 'object') delete step.opts[k]
- if (typeof step.opts[k] === 'function') delete step.opts[k]
- })
- }
-
- return {
- opts: step.opts || {},
- workerIndex,
- title: step.name,
- status: step.status,
- duration: step.duration || 0,
- err,
- parent,
- test: simplifyTest(step.test),
- }
- }
-
- collectStats()
// suite
- event.dispatcher.on(event.suite.before, suite => sendToParentThread({ event: event.suite.before, workerIndex, data: simplifyTest(suite) }))
- event.dispatcher.on(event.suite.after, suite => sendToParentThread({ event: event.suite.after, workerIndex, data: simplifyTest(suite) }))
+ event.dispatcher.on(event.suite.before, suite => sendToParentThread({ event: event.suite.before, workerIndex, data: suite.simplify() }))
+ event.dispatcher.on(event.suite.after, suite => sendToParentThread({ event: event.suite.after, workerIndex, data: suite.simplify() }))
// calculate duration
event.dispatcher.on(event.test.started, test => (test.start = new Date()))
// tests
- event.dispatcher.on(event.test.before, test => sendToParentThread({ event: event.test.before, workerIndex, data: simplifyTest(test) }))
- event.dispatcher.on(event.test.after, test => sendToParentThread({ event: event.test.after, workerIndex, data: simplifyTest(test) }))
+ event.dispatcher.on(event.test.before, test => sendToParentThread({ event: event.test.before, workerIndex, data: test.simplify() }))
+ event.dispatcher.on(event.test.after, test => sendToParentThread({ event: event.test.after, workerIndex, data: test.simplify() }))
// we should force-send correct errors to prevent race condition
- event.dispatcher.on(event.test.finished, (test, err) => sendToParentThread({ event: event.test.finished, workerIndex, data: simplifyTest(test, err) }))
- event.dispatcher.on(event.test.failed, (test, err) => sendToParentThread({ event: event.test.failed, workerIndex, data: simplifyTest(test, err) }))
- event.dispatcher.on(event.test.passed, (test, err) => sendToParentThread({ event: event.test.passed, workerIndex, data: simplifyTest(test, err) }))
- event.dispatcher.on(event.test.started, test => sendToParentThread({ event: event.test.started, workerIndex, data: simplifyTest(test) }))
- event.dispatcher.on(event.test.skipped, test => sendToParentThread({ event: event.test.skipped, workerIndex, data: simplifyTest(test) }))
+ event.dispatcher.on(event.test.finished, (test, err) => sendToParentThread({ event: event.test.finished, workerIndex, data: { ...test.simplify(), err } }))
+ event.dispatcher.on(event.test.failed, (test, err) => sendToParentThread({ event: event.test.failed, workerIndex, data: { ...test.simplify(), err } }))
+ event.dispatcher.on(event.test.passed, (test, err) => sendToParentThread({ event: event.test.passed, workerIndex, data: { ...test.simplify(), err } }))
+ event.dispatcher.on(event.test.started, test => sendToParentThread({ event: event.test.started, workerIndex, data: test.simplify() }))
+ event.dispatcher.on(event.test.skipped, test => sendToParentThread({ event: event.test.skipped, workerIndex, data: test.simplify() }))
// steps
- event.dispatcher.on(event.step.finished, step => sendToParentThread({ event: event.step.finished, workerIndex, data: simplifyStep(step) }))
- event.dispatcher.on(event.step.started, step => sendToParentThread({ event: event.step.started, workerIndex, data: simplifyStep(step) }))
- event.dispatcher.on(event.step.passed, step => sendToParentThread({ event: event.step.passed, workerIndex, data: simplifyStep(step) }))
- event.dispatcher.on(event.step.failed, step => sendToParentThread({ event: event.step.failed, workerIndex, data: simplifyStep(step) }))
+ event.dispatcher.on(event.step.finished, step => sendToParentThread({ event: event.step.finished, workerIndex, data: step.simplify() }))
+ event.dispatcher.on(event.step.started, step => sendToParentThread({ event: event.step.started, workerIndex, data: step.simplify() }))
+ event.dispatcher.on(event.step.passed, step => sendToParentThread({ event: event.step.passed, workerIndex, data: step.simplify() }))
+ event.dispatcher.on(event.step.failed, step => sendToParentThread({ event: event.step.failed, workerIndex, data: step.simplify() }))
- event.dispatcher.on(event.hook.failed, (test, err) => sendToParentThread({ event: event.hook.failed, workerIndex, data: simplifyTest(test, err) }))
- event.dispatcher.on(event.hook.passed, (test, err) => sendToParentThread({ event: event.hook.passed, workerIndex, data: simplifyTest(test, err) }))
- event.dispatcher.on(event.all.failures, data => sendToParentThread({ event: event.all.failures, workerIndex, data }))
+ event.dispatcher.on(event.hook.failed, (hook, err) => sendToParentThread({ event: event.hook.failed, workerIndex, data: { ...hook.simplify(), err } }))
+ event.dispatcher.on(event.hook.passed, hook => sendToParentThread({ event: event.hook.passed, workerIndex, data: hook.simplify() }))
+ event.dispatcher.on(event.hook.finished, hook => sendToParentThread({ event: event.hook.finished, workerIndex, data: hook.simplify() }))
+ event.dispatcher.once(event.all.after, () => {
+ sendToParentThread({ event: event.all.after, workerIndex, data: container.result().simplify() })
+ })
// all
- event.dispatcher.once(event.all.result, () => parentPort.close())
+ event.dispatcher.once(event.all.result, () => {
+ sendToParentThread({ event: event.all.result, workerIndex, data: container.result().simplify() })
+ parentPort?.close()
+ })
}
function disablePause() {
global.pause = () => {}
}
-function collectStats() {
- const stats = {
- passes: 0,
- failures: 0,
- skipped: 0,
- tests: 0,
- pending: 0,
- }
- event.dispatcher.on(event.test.skipped, () => {
- stats.skipped++
- })
- event.dispatcher.on(event.test.passed, () => {
- stats.passes++
- })
- event.dispatcher.on(event.test.failed, test => {
- if (test.ctx._runnable.title.includes('hook: AfterSuite')) {
- stats.failedHooks += 1
- }
- stats.failures++
- })
- event.dispatcher.on(event.test.skipped, () => {
- stats.pending++
- })
- event.dispatcher.on(event.test.finished, () => {
- stats.tests++
- })
- event.dispatcher.once(event.all.after, () => {
- sendToParentThread({ event: event.all.after, data: stats })
- })
-}
-
function sendToParentThread(data) {
- parentPort.postMessage(data)
+ parentPort?.postMessage(data)
}
function listenToParentThread() {
- parentPort.on('message', eventData => {
+ parentPort?.on('message', eventData => {
container.append({ support: eventData.data })
})
}
diff --git a/lib/container.js b/lib/container.js
index d8d25ebfd..6a041bb14 100644
--- a/lib/container.js
+++ b/lib/container.js
@@ -9,6 +9,7 @@ const recorder = require('./recorder')
const event = require('./event')
const WorkerStorage = require('./workerStorage')
const store = require('./store')
+const Result = require('./result')
const ai = require('./ai')
let asyncHelperPromise
@@ -25,6 +26,8 @@ let container = {
*/
mocha: {},
translation: {},
+ /** @type {Result | null} */
+ result: null,
}
/**
@@ -54,6 +57,7 @@ class Container {
container.translation = loadTranslation(config.translation || null, config.vocabularies || [])
container.proxySupport = createSupportObjects(config.include || {})
container.plugins = createPlugins(config.plugins || {}, opts)
+ container.result = new Result()
createActor(config.include?.I)
@@ -127,6 +131,18 @@ class Container {
return container.mocha
}
+ /**
+ * Get result
+ *
+ * @returns {Result}
+ */
+ static result() {
+ if (!container.result) {
+ container.result = new Result()
+ }
+ return container.result
+ }
+
/**
* Append new services to container
*
diff --git a/lib/event.js b/lib/event.js
index d7cc05046..4dd500d8f 100644
--- a/lib/event.js
+++ b/lib/event.js
@@ -1,10 +1,10 @@
-const debug = require('debug')('codeceptjs:event');
-const events = require('events');
-const { error } = require('./output');
+const debug = require('debug')('codeceptjs:event')
+const events = require('events')
+const { error } = require('./output')
-const dispatcher = new events.EventEmitter();
+const dispatcher = new events.EventEmitter()
-dispatcher.setMaxListeners(50);
+dispatcher.setMaxListeners(50)
/**
* @namespace
* @alias event
@@ -59,6 +59,7 @@ module.exports = {
started: 'hook.start',
passed: 'hook.passed',
failed: 'hook.failed',
+ finished: 'hook.finished',
},
/**
@@ -141,33 +142,33 @@ module.exports = {
* @param {*} [param]
*/
emit(event, param) {
- let msg = `Emitted | ${event}`;
+ let msg = `Emitted | ${event}`
if (param && param.toString()) {
- msg += ` (${param.toString()})`;
+ msg += ` (${param.toString()})`
}
- debug(msg);
+ debug(msg)
try {
- this.dispatcher.emit.apply(this.dispatcher, arguments);
+ this.dispatcher.emit.apply(this.dispatcher, arguments)
} catch (err) {
- error(`Error processing ${event} event:`);
- error(err.stack);
+ error(`Error processing ${event} event:`)
+ error(err.stack)
}
},
/** for testing only! */
cleanDispatcher: () => {
- let event;
+ let event
for (event in this.test) {
- this.dispatcher.removeAllListeners(this.test[event]);
+ this.dispatcher.removeAllListeners(this.test[event])
}
for (event in this.suite) {
- this.dispatcher.removeAllListeners(this.test[event]);
+ this.dispatcher.removeAllListeners(this.test[event])
}
for (event in this.step) {
- this.dispatcher.removeAllListeners(this.test[event]);
+ this.dispatcher.removeAllListeners(this.test[event])
}
for (event in this.all) {
- this.dispatcher.removeAllListeners(this.test[event]);
+ this.dispatcher.removeAllListeners(this.test[event])
}
},
-};
+}
diff --git a/lib/listener/artifacts.js b/lib/listener/artifacts.js
deleted file mode 100644
index da4cddeb7..000000000
--- a/lib/listener/artifacts.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const event = require('../event')
-const recorder = require('../recorder')
-
-/**
- * Create and clean up empty artifacts
- */
-module.exports = function () {
- event.dispatcher.on(event.test.before, test => {
- test.artifacts = {}
- })
-
- event.dispatcher.on(event.test.after, test => {
- recorder.add('clean up empty artifacts', () => {
- for (const key in test.artifacts || {}) {
- if (!test.artifacts[key]) delete test.artifacts[key]
- }
- })
- })
-}
diff --git a/lib/listener/exit.js b/lib/listener/exit.js
index 10cd8cd23..c510a3c16 100644
--- a/lib/listener/exit.js
+++ b/lib/listener/exit.js
@@ -1,20 +1,17 @@
const event = require('../event')
+const debug = require('debug')('codeceptjs:exit')
module.exports = function () {
let failedTests = []
- event.dispatcher.on(event.test.failed, testOrSuite => {
- // NOTE When an error happens in one of the hooks (BeforeAll/BeforeEach...) the event object
- // is a suite and not a test
- const id = testOrSuite.uid || (testOrSuite.ctx && testOrSuite.ctx.test.uid) || 'empty'
+ event.dispatcher.on(event.test.failed, test => {
+ const id = test.uid || (test.ctx && test.ctx.test.uid) || 'empty'
failedTests.push(id)
})
// if test was successful after retries
- event.dispatcher.on(event.test.passed, testOrSuite => {
- // NOTE When an error happens in one of the hooks (BeforeAll/BeforeEach...) the event object
- // is a suite and not a test
- const id = testOrSuite.uid || (testOrSuite.ctx && testOrSuite.ctx.test.uid) || 'empty'
+ event.dispatcher.on(event.test.passed, test => {
+ const id = test.uid || (test.ctx && test.ctx.test.uid) || 'empty'
failedTests = failedTests.filter(failed => id !== failed)
})
diff --git a/lib/listener/globalTimeout.js b/lib/listener/globalTimeout.js
index 6bb7dd1fe..07bed2807 100644
--- a/lib/listener/globalTimeout.js
+++ b/lib/listener/globalTimeout.js
@@ -4,7 +4,7 @@ const recorder = require('../recorder')
const Config = require('../config')
const store = require('../store')
const debug = require('debug')('codeceptjs:timeout')
-const { TIMEOUT_ORDER } = require('../step/timeout')
+const { TIMEOUT_ORDER, TimeoutError, TestTimeoutError, StepTimeoutError } = require('../timeout')
const { BeforeSuiteHook, AfterSuiteHook } = require('../mocha/hooks')
module.exports = function () {
@@ -119,24 +119,41 @@ module.exports = function () {
}
})
+ event.dispatcher.on(event.step.after, step => {
+ if (typeof timeout !== 'number') return
+ if (!store.timeouts) return
+
+ recorder.catchWithoutStop(err => {
+ // we wrap timeout errors in a StepTimeoutError
+ // but only if global timeout is set
+ // should we wrap all timeout errors?
+ if (err instanceof TimeoutError) {
+ const testTimeoutExceeded = timeout && +Date.now() - step.startTime >= timeout
+ debug('Step failed due to global test or suite timeout')
+ if (testTimeoutExceeded) {
+ debug('Test failed due to global test or suite timeout')
+ throw new TestTimeoutError(currentTimeout)
+ }
+ throw new StepTimeoutError(currentTimeout, step)
+ }
+ throw err
+ })
+ })
+
event.dispatcher.on(event.step.finished, step => {
if (!store.timeouts) {
debug('step', step.toCode().trim(), 'timeout disabled')
return
}
+ if (typeof timeout === 'number') debug('Timeout', timeout)
+
+ debug(`step ${step.toCode().trim()}:${step.status} duration`, step.duration)
if (typeof timeout === 'number' && !Number.isNaN(timeout)) timeout -= step.duration
if (typeof timeout === 'number' && timeout <= 0 && recorder.isRunning()) {
debug(`step ${step.toCode().trim()} timed out`)
- if (currentTest && currentTest.callback) {
- debug(`Failing test ${currentTest.title} with timeout ${currentTimeout}s`)
- recorder.reset()
- // replace mocha timeout with custom timeout
- currentTest.timeout(0)
- currentTest.callback(new Error(`Timeout ${currentTimeout}s exceeded (with Before hook)`))
- currentTest.timedOut = true
- }
+ recorder.throw(new TestTimeoutError(currentTimeout))
}
})
}
diff --git a/lib/listener/result.js b/lib/listener/result.js
new file mode 100644
index 000000000..07ca1045c
--- /dev/null
+++ b/lib/listener/result.js
@@ -0,0 +1,12 @@
+const event = require('../event')
+const container = require('../container')
+
+module.exports = function () {
+  event.dispatcher.on(event.hook.failed, hook => {
+ container.result().addStats({ failedHooks: 1 })
+ })
+
+ event.dispatcher.on(event.test.before, test => {
+ container.result().addTest(test)
+ })
+}
diff --git a/lib/listener/steps.js b/lib/listener/steps.js
index 85750f278..bcfb1b1ec 100644
--- a/lib/listener/steps.js
+++ b/lib/listener/steps.js
@@ -2,15 +2,20 @@ const debug = require('debug')('codeceptjs:steps')
const event = require('../event')
const store = require('../store')
const output = require('../output')
+const { BeforeHook, AfterHook, BeforeSuiteHook, AfterSuiteHook } = require('../mocha/hooks')
let currentTest
let currentHook
/**
+ * Register steps inside tests
*/
module.exports = function () {
- event.dispatcher.on(event.test.started, test => {
+ event.dispatcher.on(event.test.before, test => {
test.startedAt = +new Date()
+ })
+
+ event.dispatcher.on(event.test.started, test => {
currentTest = test
currentTest.steps = []
if (!('retryNum' in currentTest)) currentTest.retryNum = 0
@@ -30,13 +35,13 @@ module.exports = function () {
output.hook.started(hook)
- if (hook.ctx && hook.ctx.test) debug(`--- STARTED ${hook.title} ---`)
+ if (hook.ctx && hook.ctx.test) debug(`--- STARTED ${hook.ctx.test.title} ---`)
})
event.dispatcher.on(event.hook.passed, hook => {
currentHook = null
output.hook.passed(hook)
- if (hook.ctx && hook.ctx.test) debug(`--- ENDED ${hook.title} ---`)
+ if (hook.ctx && hook.ctx.test) debug(`--- ENDED ${hook.ctx.test.title} ---`)
})
event.dispatcher.on(event.test.failed, () => {
@@ -65,8 +70,6 @@ module.exports = function () {
})
event.dispatcher.on(event.step.started, step => {
- step.startedAt = +new Date()
- step.test = currentTest
store.currentStep = step
if (currentHook && Array.isArray(currentHook.steps)) {
return currentHook.steps.push(step)
@@ -76,67 +79,7 @@ module.exports = function () {
})
event.dispatcher.on(event.step.finished, step => {
- step.finishedAt = +new Date()
- if (step.startedAt) step.duration = step.finishedAt - step.startedAt
- debug(`Step '${step}' finished; Duration: ${step.duration || 0}ms`)
store.currentStep = null
store.stepOptions = null
})
-
- // listeners to output steps
- let currentMetaStep = []
-
- event.dispatcher.on(event.bddStep.started, step => {
- if (!printSteps()) return
-
- output.stepShift = 2
- output.step(step)
- })
-
- event.dispatcher.on(event.step.started, step => {
- if (!printSteps()) return
-
- let processingStep = step
- const metaSteps = []
- let isHidden = false
- while (processingStep.metaStep) {
- metaSteps.unshift(processingStep.metaStep)
- processingStep = processingStep.metaStep
- if (processingStep.collapsed) isHidden = true
- }
- const shift = metaSteps.length
-
- for (let i = 0; i < Math.max(currentMetaStep.length, metaSteps.length); i++) {
- if (currentMetaStep[i] !== metaSteps[i]) {
- output.stepShift = 3 + 2 * i
- if (!metaSteps[i]) continue
- // bdd steps are handled by bddStep.started
- if (metaSteps[i].isBDD()) continue
- output.step(metaSteps[i])
- }
- }
- currentMetaStep = metaSteps
-
- if (isHidden) return
- output.stepShift = 3 + 2 * shift
- output.step(step)
- })
-
- event.dispatcher.on(event.step.finished, () => {
- if (!printSteps()) return
- output.stepShift = 0
- })
-}
-
-let areStepsPrinted = false
-function printSteps() {
- if (output.level() < 1) return false
-
- // if executed first time, print debug message
- if (!areStepsPrinted) {
- debug('Printing steps', 'Output level', output.level())
- areStepsPrinted = true
- }
-
- return true
}
diff --git a/lib/mocha/asyncWrapper.js b/lib/mocha/asyncWrapper.js
index 68902a912..560776ed6 100644
--- a/lib/mocha/asyncWrapper.js
+++ b/lib/mocha/asyncWrapper.js
@@ -13,12 +13,19 @@ const injectHook = function (inject, suite) {
recorder.throw(err)
}
recorder.catch(err => {
- event.emit(event.test.failed, suite, err)
+ suiteTestFailedHookError(suite, err)
throw err
})
return recorder.promise()
}
+function suiteTestFailedHookError(suite, err) {
+ suite.eachTest(test => {
+ test.err = err
+ event.emit(event.test.failed, test, err)
+ })
+}
+
function makeDoneCallableOnce(done) {
let called = false
return function (err) {
@@ -61,6 +68,7 @@ module.exports.test = test => {
err = newErr
}
}
+ test.err = err
event.emit(event.test.failed, test, err)
event.emit(event.test.finished, test)
recorder.add(() => doneFn(err))
@@ -112,7 +120,7 @@ module.exports.injected = function (fn, suite, hookName) {
const errHandler = err => {
recorder.session.start('teardown')
recorder.cleanAsyncErr()
- event.emit(event.test.failed, suite, err)
+      if (hookName === 'before' || hookName === 'beforeSuite') suiteTestFailedHookError(suite, err)
if (hookName === 'after') event.emit(event.test.after, suite)
if (hookName === 'afterSuite') event.emit(event.suite.after, suite)
recorder.add(() => doneFn(err))
@@ -156,6 +164,7 @@ module.exports.injected = function (fn, suite, hookName) {
)
.then(() => {
recorder.add('fire hook.passed', () => fireHook(event.hook.passed, suite))
+ recorder.add('fire hook.finished', () => fireHook(event.hook.finished, suite))
recorder.add(`finish ${hookName} hook`, doneFn)
recorder.catch()
})
@@ -166,6 +175,7 @@ module.exports.injected = function (fn, suite, hookName) {
errHandler(err)
})
recorder.add('fire hook.failed', () => fireHook(event.hook.failed, suite, e))
+ recorder.add('fire hook.finished', () => fireHook(event.hook.finished, suite))
})
}
}
diff --git a/lib/mocha/cli.js b/lib/mocha/cli.js
index e9a0ffa32..313bb8834 100644
--- a/lib/mocha/cli.js
+++ b/lib/mocha/cli.js
@@ -1,12 +1,12 @@
const {
reporters: { Base },
} = require('mocha')
-const figures = require('figures')
const ms = require('ms')
+const figures = require('figures')
const event = require('../event')
const AssertionFailedError = require('../assert/error')
const output = require('../output')
-
+const { cloneTest } = require('./test')
const cursor = Base.cursor
let currentMetaStep = []
let codeceptjsEventDispatchersRegistered = false
@@ -32,6 +32,16 @@ class Cli extends Base {
output.print(output.styles.debug(`Plugins: ${Object.keys(Containter.plugins()).join(', ')}`))
}
+ if (level >= 3) {
+ process.on('warning', warning => {
+ console.log('\nWarning Details:')
+ console.log('Name:', warning.name)
+ console.log('Message:', warning.message)
+ console.log('Stack:', warning.stack)
+ console.log('-------------------')
+ })
+ }
+
runner.on('start', () => {
console.log()
})
@@ -79,6 +89,45 @@ class Cli extends Base {
output.test.started(test)
}
})
+
+ if (!codeceptjsEventDispatchersRegistered) {
+ codeceptjsEventDispatchersRegistered = true
+
+ event.dispatcher.on(event.bddStep.started, step => {
+ output.stepShift = 2
+ output.step(step)
+ })
+
+ event.dispatcher.on(event.step.started, step => {
+ let processingStep = step
+ const metaSteps = []
+ let isHidden = false
+ while (processingStep.metaStep) {
+ metaSteps.unshift(processingStep.metaStep)
+ processingStep = processingStep.metaStep
+ if (processingStep.collapsed) isHidden = true
+ }
+ const shift = metaSteps.length
+
+ for (let i = 0; i < Math.max(currentMetaStep.length, metaSteps.length); i++) {
+ if (currentMetaStep[i] !== metaSteps[i]) {
+ output.stepShift = 3 + 2 * i
+ if (!metaSteps[i]) continue
+ // bdd steps are handled by bddStep.started
+ if (metaSteps[i].isBDD()) continue
+ output.step(metaSteps[i])
+ }
+ }
+ currentMetaStep = metaSteps
+ if (isHidden) return
+ output.stepShift = 3 + 2 * shift
+ output.step(step)
+ })
+
+ event.dispatcher.on(event.step.finished, () => {
+ output.stepShift = 0
+ })
+ }
}
runner.on('suite end', suite => {
@@ -100,16 +149,19 @@ class Cli extends Base {
}
}
- this.stats.pending += skippedCount
- this.stats.tests += skippedCount
+ const container = require('../container')
+ container.result().addStats({ pending: skippedCount, tests: skippedCount })
})
runner.on('end', this.result.bind(this))
}
result() {
- const stats = this.stats
- stats.failedHooks = 0
+ const container = require('../container')
+ container.result().addStats(this.stats)
+ container.result().finish()
+
+ const stats = container.result().stats
console.log()
// passes
@@ -122,7 +174,8 @@ class Cli extends Base {
// failures
if (stats.failures) {
// append step traces
- this.failures.map(test => {
+ this.failures = this.failures.map(test => {
+ // we will change the stack trace, so we need to clone the test
const err = test.err
let log = ''
@@ -132,8 +185,18 @@ class Cli extends Base {
err.message = err.inspect()
}
- // multi-line error messages
- err.message = '\n ' + (err.message || '').replace(/^/gm, ' ').trim()
+ // multi-line error messages (for Playwright)
+ if (err.message && err.message.includes('\n')) {
+ const lines = err.message.split('\n')
+ const truncatedLines = lines.slice(0, 5)
+ if (lines.length > 5) {
+ truncatedLines.push('...')
+ }
+ err.message = truncatedLines.join('\n').replace(/^/gm, ' ').trim()
+ }
+
+ // add new line before the message
+ err.message = '\n ' + err.message
const steps = test.steps || (test.ctx && test.ctx.test.steps)
@@ -166,25 +229,30 @@ class Cli extends Base {
try {
let stack = err.stack
- stack = stack.replace(originalMessage, '')
+ stack = (stack || '').replace(originalMessage, '')
stack = stack ? stack.split('\n') : []
if (stack[0] && stack[0].includes(err.message)) {
stack.shift()
}
+      if (stack[0] && stack[0].trim() === 'Error:') {
+ stack.shift()
+ }
+
if (output.level() < 3) {
stack = stack.slice(0, 3)
}
err.stack = `${stack.join('\n')}\n\n${output.colors.blue(log)}`
-
- // clone err object so stack trace adjustments won't affect test other reports
- test.err = err
- return test
} catch (e) {
- throw Error(e)
+ console.error(e)
}
+
+ // we will change the stack trace, so we need to clone the test
+ test = cloneTest(test)
+ test.err = err
+ return test
})
const originalLog = Base.consoleLog
@@ -197,12 +265,8 @@ class Cli extends Base {
console.log()
}
- this.failures.forEach(failure => {
- if (failure.constructor.name === 'Hook') {
- stats.failedHooks += 1
- }
- })
- event.emit(event.all.failures, { failuresLog, stats })
+ container.result().addFailures(failuresLog)
+
output.result(stats.passes, stats.failures, stats.pending, ms(stats.duration), stats.failedHooks)
if (stats.failures && output.level() < 3) {
diff --git a/lib/mocha/hooks.js b/lib/mocha/hooks.js
index 40b3b46e0..0dedc0adf 100644
--- a/lib/mocha/hooks.js
+++ b/lib/mocha/hooks.js
@@ -1,19 +1,47 @@
const event = require('../event')
+const { serializeError } = require('../utils')
+// const { serializeTest } = require('./test')
+/**
+ * Represents a test hook in the testing framework
+ * @class
+ * @property {Object} suite - The test suite this hook belongs to
+ * @property {Object} test - The test object associated with this hook
+ * @property {Object} runnable - The current test being executed
+ * @property {Object} ctx - The context object
+ * @property {Error|null} err - The error that occurred during hook execution, if any
+ */
class Hook {
+ /**
+ * Creates a new Hook instance
+ * @param {Object} context - The context object containing suite and test information
+ * @param {Object} context.suite - The test suite
+ * @param {Object} context.test - The test object
+ * @param {Object} context.ctx - The context object
+ * @param {Error} error - The error object if hook execution failed
+ */
constructor(context, error) {
this.suite = context.suite
this.test = context.test
this.runnable = context?.ctx?.test
this.ctx = context.ctx
- this.error = error
- this.steps = []
+ this.err = error
}
get hookName() {
return this.constructor.name.replace('Hook', '')
}
+ simplify() {
+ return {
+ hookName: this.hookName,
+ title: this.title,
+ // test: this.test ? serializeTest(this.test) : null,
+ // suite: this.suite ? serializeSuite(this.suite) : null,
+ error: this.err ? serializeError(this.err) : null,
+ }
+ }
+
toString() {
return this.hookName
}
@@ -47,13 +75,13 @@ function fireHook(eventType, suite, error) {
const hook = suite.ctx?.test?.title?.match(/"([^"]*)"/)[1]
switch (hook) {
case 'before each':
- event.emit(eventType, new BeforeHook(suite))
+ event.emit(eventType, new BeforeHook(suite, error))
break
case 'after each':
event.emit(eventType, new AfterHook(suite, error))
break
case 'before all':
- event.emit(eventType, new BeforeSuiteHook(suite))
+ event.emit(eventType, new BeforeSuiteHook(suite, error))
break
case 'after all':
event.emit(eventType, new AfterSuiteHook(suite, error))
diff --git a/lib/mocha/suite.js b/lib/mocha/suite.js
index be96439d6..ab9e5ec1f 100644
--- a/lib/mocha/suite.js
+++ b/lib/mocha/suite.js
@@ -1,5 +1,4 @@
const MochaSuite = require('mocha/lib/suite')
-
/**
* @typedef {import('mocha')} Mocha
*/
@@ -34,6 +33,10 @@ function enhanceMochaSuite(suite) {
}
}
+ suite.simplify = function () {
+ return serializeSuite(this)
+ }
+
return suite
}
@@ -49,7 +52,30 @@ function createSuite(parent, title) {
return enhanceMochaSuite(suite)
}
+function serializeSuite(suite) {
+ suite = { ...suite }
+
+ return {
+ opts: suite.opts || {},
+ tags: suite.tags || [],
+ retries: suite._retries,
+ title: suite.title,
+ status: suite.status,
+ notes: suite.notes || [],
+ meta: suite.meta || {},
+ duration: suite.duration || 0,
+ }
+}
+
+function deserializeSuite(suite) {
+ suite = Object.assign(new MochaSuite(suite.title), suite)
+ enhanceMochaSuite(suite)
+ return suite
+}
+
module.exports = {
createSuite,
enhanceMochaSuite,
+ serializeSuite,
+ deserializeSuite,
}
diff --git a/lib/mocha/test.js b/lib/mocha/test.js
index 5c326a346..a10472f6b 100644
--- a/lib/mocha/test.js
+++ b/lib/mocha/test.js
@@ -1,9 +1,9 @@
const Test = require('mocha/lib/test')
const Suite = require('mocha/lib/suite')
const { test: testWrapper } = require('./asyncWrapper')
-const { enhanceMochaSuite } = require('./suite')
-const { genTestId } = require('../utils')
-
+const { enhanceMochaSuite, createSuite } = require('./suite')
+const { genTestId, serializeError, clearString, relativeDir } = require('../utils')
+const Step = require('../step/base')
/**
* Factory function to create enhanced tests
* @param {string} title - Test title
@@ -46,6 +46,7 @@ function enhanceMochaTest(test) {
test.addToSuite = function (suite) {
enhanceMochaSuite(suite)
suite.addTest(testWrapper(this))
+ if (test.file && !suite.file) suite.file = test.file
test.tags = [...(test.tags || []), ...(suite.tags || [])]
test.fullTitle = () => `${suite.title}: ${test.title}`
test.uid = genTestId(test)
@@ -59,17 +60,100 @@ function enhanceMochaTest(test) {
if (opts.retries) this.retries(opts.retries)
}
+ test.simplify = function () {
+ return serializeTest(this)
+ }
+
return test
}
-function repackTestForWorkersTransport(test) {
- test = Object.assign(new Test(test.title || '', () => {}), test)
- test.parent = Object.assign(new Suite(test.parent.title), test.parent)
+function deserializeTest(test) {
+ test = Object.assign(
+ createTest(test.title || '', () => {}),
+ test,
+ )
+ test.parent = Object.assign(new Suite(test.parent?.title || 'Suite'), test.parent)
+ enhanceMochaSuite(test.parent)
+ if (test.steps) test.steps = test.steps.map(step => Object.assign(new Step(step.title), step))
return test
}
+function serializeTest(test, error = null) {
+ // test = { ...test }
+
+ if (test.start && !test.duration) {
+ const end = +new Date()
+ test.duration = end - test.start
+ }
+
+ let err
+
+ if (test.err) {
+ err = serializeError(test.err)
+ test.state = 'failed'
+ } else if (error) {
+ err = serializeError(error)
+ test.state = 'failed'
+ }
+ const parent = {}
+ if (test.parent) {
+ parent.title = test.parent.title
+ }
+
+ if (test.opts) {
+ Object.keys(test.opts).forEach(k => {
+ if (typeof test.opts[k] === 'object') delete test.opts[k]
+ if (typeof test.opts[k] === 'function') delete test.opts[k]
+ })
+ }
+
+ let steps = undefined
+ if (Array.isArray(test.steps)) {
+ steps = test.steps.map(step => (step.simplify ? step.simplify() : step))
+ }
+
+ return {
+ opts: test.opts || {},
+ tags: test.tags || [],
+ uid: test.uid,
+ retries: test._retries,
+ title: test.title,
+ state: test.state,
+ notes: test.notes || [],
+ meta: test.meta || {},
+ artifacts: test.artifacts || {},
+ duration: test.duration || 0,
+ err,
+ parent,
+ steps,
+ }
+}
+
+function cloneTest(test) {
+ return deserializeTest(serializeTest(test))
+}
+
+function testToFileName(test) {
+ let fileName = clearString(test.title)
+ // remove tags with empty string (disable for now)
+ // fileName = fileName.replace(/\@\w+/g, '')
+ fileName = fileName.slice(0, 100)
+ if (fileName.indexOf('{') !== -1) {
+ fileName = fileName.substr(0, fileName.indexOf('{') - 3).trim()
+ }
+ if (test.ctx && test.ctx.test && test.ctx.test.type === 'hook') fileName = clearString(`${test.title}_${test.ctx.test.title}`)
+ // TODO: add suite title to file name
+ // if (test.parent && test.parent.title) {
+ // fileName = `${clearString(test.parent.title)}_${fileName}`
+ // }
+ return fileName
+}
+
module.exports = {
createTest,
+ testToFileName,
enhanceMochaTest,
- repackTestForWorkersTransport,
+ serializeTest,
+ deserializeTest,
+ cloneTest,
}
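For context, a sketch of how the new helpers fit together (illustrative; paths assume `lib/`). `serializeTest` produces a plain object for worker transport, `deserializeTest` restores a runnable Mocha test, and `testToFileName` is the shared file-name logic now reused by `screenshotOnFail` and `pageInfo`.

```js
const { createTest, serializeTest, deserializeTest, cloneTest, testToFileName } = require('./mocha/test')

const test = createTest('login works | {"user":"admin"}', async () => {})

const payload = serializeTest(test) // plain object: title, state, steps, artifacts, err, ...
const restored = deserializeTest(payload) // a real Mocha test again, with an enhanced parent suite
const copy = cloneTest(test) // shorthand for deserializeTest(serializeTest(test))

// sanitized title, with the data-driven payload after '{' cut off
console.log(testToFileName(test))
```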
diff --git a/lib/mocha/types.d.ts b/lib/mocha/types.d.ts
index 2bdb55a56..9069f72dd 100644
--- a/lib/mocha/types.d.ts
+++ b/lib/mocha/types.d.ts
@@ -12,14 +12,19 @@ declare global {
type: string
text: string
}>
+ state: string
+ err?: Error
config: Record
artifacts: string[]
inject: Record
opts: Record
throws?: Error | string | RegExp | Function
totalTimeout?: number
+ relativeFile?: string
addToSuite(suite: Mocha.Suite): void
applyOptions(opts: Record): void
+ simplify(): Record
+ toFileName(): string
addNote(type: string, note: string): void
codeceptjs: boolean
}
diff --git a/lib/output.js b/lib/output.js
index fc209d043..919764635 100644
--- a/lib/output.js
+++ b/lib/output.js
@@ -136,6 +136,7 @@ module.exports = {
started: suite => {
if (!suite.title) return
print(`${colors.bold(suite.title)} --`)
+ if (suite.file && outputLevel >= 1) print(colors.underline.grey(suite.file))
if (suite.comment) print(suite.comment)
},
},
diff --git a/lib/plugin/analyze.js b/lib/plugin/analyze.js
new file mode 100644
index 000000000..e92e38a3a
--- /dev/null
+++ b/lib/plugin/analyze.js
@@ -0,0 +1,351 @@
+const debug = require('debug')('codeceptjs:analyze')
+const { isMainThread } = require('node:worker_threads')
+const { arrowRight } = require('figures')
+const container = require('../container')
+const ai = require('../ai')
+const colors = require('chalk')
+const ora = require('ora-classic')
+const event = require('../event')
+const output = require('../output')
+const { ansiRegExp, base64EncodeFile, markdownToAnsi } = require('../utils')
+
+const MAX_DATA_LENGTH = 5000
+
+const defaultConfig = {
+ clusterize: 2,
+ analyze: 3,
+ vision: false,
+ categories: [
+ 'Browser connection error / browser crash',
+ 'Network errors (server error, timeout, etc)',
+ 'HTML / page elements (not found, not visible, etc)',
+ 'Navigation errors (404, etc)',
+ 'Code errors (syntax error, JS errors, etc)',
+ 'Library & framework errors (CodeceptJS internal errors, user-defined libraries, etc)',
+ 'Data errors (password incorrect, no options in select, invalid format, etc)',
+ 'Assertion failures',
+ 'Other errors',
+ ],
+ prompts: {
+ clusterize: (tests, config) => {
+ const serializedFailedTests = tests
+ .map((test, index) => {
+ if (!test || !test.err) return
+ return `
+ #${index + 1}: ${serializeTest(test)}
+ ${serializeError(test.err).slice(0, MAX_DATA_LENGTH / tests.length)}`.trim()
+ })
+ .join('\n\n--------\n\n')
+
+ const messages = [
+ {
+ role: 'user',
+ content: `
+ I am a test analyst analyzing failed tests in the CodeceptJS testing framework.
+
+ Please analyze the following failed tests and classify them into groups by their cause.
+ If no groups are detected, say: "No common groups found".
+
+ Provide a short description of the group and a list of failed tests that belong to this group.
+ Use percent sign to indicate the percentage of failed tests in the group if this percentage is greater than 30%.
+
+ Here are failed tests:
+
+ ${serializedFailedTests}
+
+ Common categories of failures by order of priority:
+
+ ${config.categories.join('\n- ')}
+
+ If there are no groups of tests, say: "No patterns found"
+ Preserve error messages but cut them if they are too long.
+ Respond clearly and directly, without introductory words or phrases like “Of course,” “Here is the answer,” etc.
+ Do not list more than 3 errors in the group.
+ If you identify that all tests in the group have the same tag, add this tag to the group report, otherwise ignore TAG section.
+ If you identify that all tests in the group have the same suite, add this suite to the group report, otherwise ignore SUITE section.
+ Pick different emojis for each group.
+ Do not include a group in the report if it has only one test in its affected tests section.
+
+ Provide list of groups in following format:
+
+ _______________________________
+
+ ## Group <group title>
+
+ * CATEGORY <category>
+ * ERROR <error message>, <error message>, ...
+ * SUMMARY <short summary>
+ * STEP (use CodeceptJS format I.click(), I.see(), etc; if all failures happened on the same step)
+ * SUITE <suite title>, <suite title> (if SUITE is present, and if all tests in the group have the same suite or suites)
+ * TAG <tag> (if TAG is present, and if all tests in the group have the same tag)
+ * AFFECTED TESTS (<number of tests>):
+ x <test title>
+ x <test title>
+ x <test title>
+ x ...
+ `,
+ },
+ {
+ role: 'assistant',
+ content: `## '
+ `,
+ },
+ ]
+ return messages
+ },
+ analyze: (test, config) => {
+ const testMessage = serializeTest(test)
+ const errorMessage = serializeError(test.err)
+
+ const messages = [
+ {
+ role: 'user',
+ content: [
+ {
+ type: 'text',
+ text: `
+ I am a QA engineer analyzing failed tests in the CodeceptJS testing framework.
+ Please analyze the following failed test and its error, and explain it.
+
+ Pick one of the categories of failures and explain it.
+
+ Categories of failures in order of priority:
+
+ ${config.categories.join('\n- ')}
+
+ Here is the test and error:
+
+ ------- TEST -------
+ ${testMessage}
+
+ ------- ERROR -------
+ ${errorMessage}
+
+ ------ INSTRUCTIONS ------
+
+ Do not get to details, be concise.
+ If there is a failed step, just write it in the STEPS section.
+ If you have suggestions for the test, write them in the SUMMARY section.
+ Inside SUMMARY write exact values; if you have suggestions, explain which information you used to make them.
+ Be concise, each section should not take more than one sentence.
+
+ Response format:
+
+ * CATEGORY
+ * STEPS
+ * SUMMARY
+
+ Do not add any other sections or explanations. Only CATEGORY, SUMMARY, STEPS.
+ ${config.vision ? 'Also a screenshot of the page is attached to the prompt.' : ''}
+ `,
+ },
+ ],
+ },
+ ]
+
+ if (config.vision && test.artifacts.screenshot) {
+ debug('Adding screenshot to prompt')
+ messages[0].content.push({
+ type: 'image_url',
+ image_url: {
+ url: 'data:image/png;base64,' + base64EncodeFile(test.artifacts.screenshot),
+ },
+ })
+ }
+
+ messages.push({
+ role: 'assistant',
+ content: `## `,
+ })
+
+ return messages
+ },
+ },
+}
+
+/**
+ * Uses AI to analyze failed tests after a run: clusterizes failures into groups
+ * when many tests fail, or explains individual failures, and prints the report.
+ *
+ * @param {*} config
+ * @returns
+ */
+module.exports = function (config = {}) {
+ config = Object.assign(defaultConfig, config)
+
+ event.dispatcher.on(event.workers.before, () => {
+ if (!ai.isEnabled) return
+ console.log('Enabled AI analysis')
+ })
+
+ event.dispatcher.on(event.all.result, async result => {
+ if (!isMainThread) return // run only on main thread
+ if (!ai.isEnabled) {
+ console.log('AI is disabled, no analysis will be performed. Run tests with --ai flag to enable it.')
+ return
+ }
+
+ printReport(result)
+ })
+
+ event.dispatcher.on(event.workers.result, async result => {
+ if (!result.hasFailed) {
+ console.log('Everything is fine, skipping AI analysis')
+ return
+ }
+
+ if (!ai.isEnabled) {
+ console.log('AI is disabled, no analysis will be performed. Run tests with --ai flag to enable it.')
+ return
+ }
+
+ printReport(result)
+ })
+
+ async function printReport(result) {
+ const failedTestsAndErrors = result.tests.filter(t => t.err)
+
+ if (!failedTestsAndErrors.length) return
+
+ debug(failedTestsAndErrors.map(t => serializeTest(t) + '\n' + serializeError(t.err)))
+
+ try {
+ if (failedTestsAndErrors.length >= config.clusterize) {
+ const response = await clusterize(failedTestsAndErrors)
+ printHeader()
+ console.log(response)
+ return
+ }
+
+ output.plugin('analyze', `Analyzing first ${config.analyze} failed tests...`)
+
+ // we pick only unique errors to not repeat answers
+ const uniqueErrors = failedTestsAndErrors.filter((item, index, array) => {
+ return array.findIndex(t => t.err?.message === item.err?.message) === index
+ })
+
+ for (let i = 0; i < config.analyze; i++) {
+ if (!uniqueErrors[i]) break
+
+ const response = await analyze(uniqueErrors[i])
+ if (!response) {
+ break
+ }
+
+ printHeader()
+ console.log()
+ console.log('--------------------------------')
+ console.log(arrowRight, colors.bold.white(uniqueErrors[i].fullTitle()), config.vision ? 'π' : '')
+ console.log()
+ console.log()
+ console.log(response)
+ console.log()
+ }
+ } catch (err) {
+ console.error('Error analyzing failed tests', err)
+ }
+
+ if (!Object.keys(container.plugins()).includes('pageInfo')) {
+ console.log('To improve analysis, enable pageInfo plugin to get more context for failed tests.')
+ }
+ }
+
+ let hasPrintedHeader = false
+
+ function printHeader() {
+ if (!hasPrintedHeader) {
+ console.log()
+ console.log(colors.bold.white('🪄 AI REPORT:'))
+ hasPrintedHeader = true
+ }
+ }
+
+ async function clusterize(failedTestsAndErrors) {
+ const spinner = ora('Clusterizing failures...').start()
+ const prompt = config.prompts.clusterize(failedTestsAndErrors, config)
+ try {
+ const response = await ai.createCompletion(prompt)
+ spinner.stop()
+ return formatResponse(response)
+ } catch (err) {
+ spinner.stop()
+ console.error('Error clusterizing failures', err.message)
+ }
+ }
+
+ async function analyze(failedTestAndError) {
+ const spinner = ora('Analyzing failure...').start()
+ const prompt = config.prompts.analyze(failedTestAndError, config)
+ try {
+ const response = await ai.createCompletion(prompt)
+ spinner.stop()
+ return formatResponse(response)
+ } catch (err) {
+ spinner.stop()
+ console.error('Error analyzing failure:', err.message)
+ }
+ }
+}
+
+function serializeError(error) {
+ if (typeof error === 'string') {
+ return error
+ }
+
+ if (!error) return
+
+ let errorMessage = 'ERROR: ' + error.message
+
+ if (error.inspect) {
+ errorMessage = 'ERROR: ' + error.inspect()
+ }
+
+ if (error.stack) {
+ errorMessage +=
+ '\n' +
+ error.stack
+ .replace(global.codecept_dir || '', '.')
+ .split('\n')
+ .map(line => line.replace(ansiRegExp(), ''))
+ .slice(0, 5)
+ .join('\n')
+ }
+ if (error.steps) {
+ errorMessage += '\n STEPS: ' + error.steps.map(s => s.toCode()).join('\n')
+ }
+ return errorMessage
+}
+
+function serializeTest(test) {
+ if (!test.uid) return
+
+ let testMessage = 'TEST TITLE: ' + test.title
+
+ if (test.suite) {
+ testMessage += '\n SUITE: ' + test.suite.title
+ }
+ if (test.parent) {
+ testMessage += '\n SUITE: ' + test.parent.title
+ }
+
+ if (test.steps?.length) {
+ const failedSteps = test.steps
+ if (failedSteps.length) testMessage += '\n STEP: ' + failedSteps.map(s => s.toCode()).join('; ')
+ }
+
+ const pageInfo = test.notes.find(n => n.type === 'pageInfo')
+ if (pageInfo) {
+ testMessage += '\n PAGE INFO: ' + pageInfo.text
+ }
+
+ return testMessage
+}
+
+function formatResponse(response) {
+ if (!response.startsWith('##')) response = '## ' + response
+ return response
+ .split('\n')
+ .map(line => line.trim())
+ .filter(line => !/^[A-Z\s]+$/.test(line))
+ .map(line => markdownToAnsi(line))
+ .map(line => line.replace(/^x /gm, ` ${colors.red.bold('x')} `))
+ .join('\n')
+}
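A hedged configuration sketch for the new plugin (values below are illustrative; the project's existing `ai` provider setup is assumed and not shown):

```js
// codecept.conf.js (sketch)
exports.config = {
  // ... helpers, tests, ai provider setup, etc.
  plugins: {
    analyze: {
      enabled: true,
      clusterize: 5, // with 5+ failures, group them into clusters instead of analyzing one by one
      analyze: 2,    // otherwise explain up to 2 unique failures
      vision: true,  // attach the failure screenshot to the prompt (pairs with screenshotOnFail)
    },
    pageInfo: {
      enabled: true, // recommended: gives analyze extra page context via test notes
    },
  },
}
```

As the plugin's own notices state, the run must be started with the `--ai` flag; without it the plugin prints a message and skips analysis.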
diff --git a/lib/plugin/customReporter.js b/lib/plugin/customReporter.js
new file mode 100644
index 000000000..6b2cc0c05
--- /dev/null
+++ b/lib/plugin/customReporter.js
@@ -0,0 +1,52 @@
+const event = require('../event')
+
+/**
+ * Sample custom reporter for CodeceptJS.
+ */
+module.exports = function (config) {
+ event.dispatcher.on(event.hook.finished, hook => {
+ if (config.onHookFinished) {
+ config.onHookFinished(hook)
+ }
+ })
+
+ event.dispatcher.on(event.test.before, test => {
+ if (config.onTestBefore) {
+ config.onTestBefore(test)
+ }
+ })
+
+ event.dispatcher.on(event.test.failed, (test, err) => {
+ if (config.onTestFailed) {
+ config.onTestFailed(test, err)
+ }
+ })
+
+ event.dispatcher.on(event.test.passed, test => {
+ if (config.onTestPassed) {
+ config.onTestPassed(test)
+ }
+ })
+
+ event.dispatcher.on(event.test.skipped, test => {
+ if (config.onTestSkipped) {
+ config.onTestSkipped(test)
+ }
+ })
+
+ event.dispatcher.on(event.test.finished, test => {
+ if (config.onTestFinished) {
+ config.onTestFinished(test)
+ }
+ })
+
+ event.dispatcher.on(event.all.result, result => {
+ if (config.onResult) {
+ config.onResult(result)
+ }
+
+ if (config.save) {
+ result.save()
+ }
+ })
+}
diff --git a/lib/plugin/debugErrors.js b/lib/plugin/debugErrors.js
deleted file mode 100644
index c24255a62..000000000
--- a/lib/plugin/debugErrors.js
+++ /dev/null
@@ -1,67 +0,0 @@
-const Container = require('../container')
-const recorder = require('../recorder')
-const event = require('../event')
-const supportedHelpers = require('./standardActingHelpers')
-const { scanForErrorMessages } = require('../html')
-const { output } = require('..')
-
-const defaultConfig = {
- errorClasses: ['error', 'warning', 'alert', 'danger'],
-}
-
-/**
- * Prints errors found in HTML code after each failed test.
- *
- * It scans HTML and searches for elements with error classes.
- * If an element found prints a text from it to console and adds as artifact to the test.
- *
- * Enable this plugin in config:
- *
- * ```js
- * plugins: {
- * debugErrors: {
- * enabled: true,
- * }
- * ```
- *
- * Additional config options:
- *
- * * `errorClasses` - list of classes to search for errors (default: `['error', 'warning', 'alert', 'danger']`)
- *
- */
-module.exports = function (config = {}) {
- const helpers = Container.helpers()
- let helper
-
- config = Object.assign(defaultConfig, config)
-
- for (const helperName of supportedHelpers) {
- if (Object.keys(helpers).indexOf(helperName) > -1) {
- helper = helpers[helperName]
- }
- }
-
- if (!helper) return // no helpers for screenshot
-
- event.dispatcher.on(event.test.failed, test => {
- recorder.add('HTML snapshot failed test', async () => {
- try {
- const currentOutputLevel = output.level()
- output.level(0)
- const html = await helper.grabHTMLFrom('body')
- output.level(currentOutputLevel)
-
- if (!html) return
-
- const errors = scanForErrorMessages(html, config.errorClasses)
- if (errors.length) {
- output.debug('Detected errors in HTML code')
- errors.forEach(error => output.debug(error))
- test.artifacts.errors = errors
- }
- } catch (err) {
- // not really needed
- }
- })
- })
-}
diff --git a/lib/plugin/heal.js b/lib/plugin/heal.js
index 35644f875..abf788122 100644
--- a/lib/plugin/heal.js
+++ b/lib/plugin/heal.js
@@ -117,10 +117,10 @@ module.exports = function (config = {}) {
}
})
- event.dispatcher.on(event.workers.result, ({ tests }) => {
+ event.dispatcher.on(event.workers.result, result => {
const { print } = output
- const healedTests = Object.values(tests)
+ const healedTests = Object.values(result.tests)
.flat()
.filter(test => test.notes.some(note => note.type === 'heal'))
if (!healedTests.length) return
diff --git a/lib/plugin/pageInfo.js b/lib/plugin/pageInfo.js
new file mode 100644
index 000000000..950b500aa
--- /dev/null
+++ b/lib/plugin/pageInfo.js
@@ -0,0 +1,143 @@
+const path = require('path')
+const fs = require('fs')
+const Container = require('../container')
+const recorder = require('../recorder')
+const event = require('../event')
+const supportedHelpers = require('./standardActingHelpers')
+const { scanForErrorMessages } = require('../html')
+const { output } = require('..')
+const { humanizeString, ucfirst } = require('../utils')
+const { testToFileName } = require('../mocha/test')
+const defaultConfig = {
+ errorClasses: ['error', 'warning', 'alert', 'danger'],
+ browserLogs: ['error'],
+}
+
+/**
+ * Collects information from web page after each failed test and adds it to the test as an artifact.
+ * It is suggested to enable this plugin if you run tests on CI and you need to debug failed tests.
+ * This plugin can be paired with `analyze` plugin to provide more context.
+ *
+ * It collects URL, HTML errors (by classes), and browser logs.
+ *
+ * Enable this plugin in config:
+ *
+ * ```js
+ * plugins: {
+ * pageInfo: {
+ * enabled: true,
+ * }
+ * ```
+ *
+ * Additional config options:
+ *
+ * * `errorClasses` - list of classes to search for errors (default: `['error', 'warning', 'alert', 'danger']`)
+ * * `browserLogs` - list of types of errors to search for in browser logs (default: `['error']`)
+ *
+ */
+module.exports = function (config = {}) {
+ const helpers = Container.helpers()
+ let helper
+
+ config = Object.assign(defaultConfig, config)
+
+ for (const helperName of supportedHelpers) {
+ if (Object.keys(helpers).indexOf(helperName) > -1) {
+ helper = helpers[helperName]
+ }
+ }
+
+ if (!helper) return // no helpers for screenshot
+
+ event.dispatcher.on(event.test.failed, test => {
+ const pageState = {}
+
+ recorder.add('URL of failed test', async () => {
+ try {
+ const url = await helper.grabCurrentUrl()
+ pageState.url = url
+ } catch (err) {
+ // not really needed
+ }
+ })
+ recorder.add('HTML snapshot failed test', async () => {
+ try {
+ const currentOutputLevel = output.level()
+ output.level(0)
+ const html = await helper.grabHTMLFrom('body')
+ output.level(currentOutputLevel)
+
+ if (!html) return
+
+ const errors = scanForErrorMessages(html, config.errorClasses)
+ if (errors.length) {
+ output.debug('Detected errors in HTML code')
+ errors.forEach(error => output.debug(error))
+ pageState.htmlErrors = errors
+ }
+ } catch (err) {
+ // not really needed
+ }
+ })
+
+ recorder.add('Browser logs for failed test', async () => {
+ try {
+ const logs = await helper.grabBrowserLogs()
+
+ if (!logs) return
+
+ pageState.browserErrors = getBrowserErrors(logs, config.browserLogs)
+ } catch (err) {
+ // not really needed
+ }
+ })
+
+ recorder.add('Save page info', () => {
+ test.addNote('pageInfo', pageStateToMarkdown(pageState))
+
+ const pageStateFileName = path.join(global.output_dir, `${testToFileName(test)}.pageInfo.md`)
+ fs.writeFileSync(pageStateFileName, pageStateToMarkdown(pageState))
+ test.artifacts.pageInfo = pageStateFileName
+ return pageState
+ })
+ })
+}
+
+function pageStateToMarkdown(pageState) {
+ let markdown = ''
+
+ for (const [key, value] of Object.entries(pageState)) {
+ if (!value) continue
+ let result = ''
+
+ if (Array.isArray(value)) {
+ result = value.map(v => `- ${JSON.stringify(v, null, 2)}`).join('\n')
+ } else if (typeof value === 'string') {
+ result = `${value}`
+ } else {
+ result = JSON.stringify(value, null, 2)
+ }
+
+ if (!result.trim()) continue
+
+ markdown += `### ${ucfirst(humanizeString(key))}\n\n`
+ markdown += result
+ markdown += '\n\n'
+ }
+
+ return markdown
+}
+
+function getBrowserErrors(logs, type = ['error']) {
+ // Playwright & WebDriver console messages
+ let errors = logs
+ .map(log => {
+ if (typeof log === 'string') return log
+ if (!log.type) return null
+ return { type: log.type(), text: log.text() }
+ })
+ .filter(l => l && (typeof l === 'string' || type.includes(l.type)))
+ .map(l => (typeof l === 'string' ? l : l.text))
+
+ return errors
+}
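A sketch of how the collected data can be consumed downstream (illustrative, assuming a plugin file under `lib/`): the information is attached as a `pageInfo` note and a markdown artifact, which is exactly what `analyze` reads from `test.notes` when building its prompt.

```js
const event = require('../event')

module.exports = function () {
  event.dispatcher.on(event.all.result, result => {
    for (const test of result.tests.filter(t => t.err)) {
      const note = (test.notes || []).find(n => n.type === 'pageInfo')
      if (note) console.log(`Page info for "${test.title}":\n${note.text}`) // markdown from pageStateToMarkdown()
      if (test.artifacts?.pageInfo) console.log(`Saved to ${test.artifacts.pageInfo}`)
    }
  })
}
```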
diff --git a/lib/plugin/retryTo.js b/lib/plugin/retryTo.js
index ed9c3cdfc..71405d974 100644
--- a/lib/plugin/retryTo.js
+++ b/lib/plugin/retryTo.js
@@ -1,6 +1,8 @@
-module.exports = function () {
+const { retryTo } = require('../effects')
+
+module.exports = function (config) {
console.log(`
-Deprecated Warning: 'retryTo' has been moved to the effects module.
+Deprecation Warning: 'retryTo' has been moved to the effects module.
You should update your tests to use it as follows:
\`\`\`javascript
@@ -16,4 +18,10 @@ await retryTo((tryNum) => {
For more details, refer to the documentation.
`)
+
+ if (config.registerGlobal) {
+ global.retryTo = retryTo
+ }
+
+ return retryTo
}
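For reference, a sketch of the migration the deprecation notice asks for (assuming the effects module is exposed as `codeceptjs/effects` and keeps the `retryTo(fn, maxTries)` signature). Setting `registerGlobal: true` in the plugin config keeps the old global `retryTo` available during migration, as the code above shows.

```js
// new-style test file (sketch)
const { retryTo } = require('codeceptjs/effects')

Feature('checkout')

Scenario('retries a flaky widget', async ({ I }) => {
  await retryTo(tryNum => {
    I.refreshPage()
    I.see('Widget loaded')
  }, 3)
})
```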
diff --git a/lib/plugin/screenshotOnFail.js b/lib/plugin/screenshotOnFail.js
index bf7a461dc..f35b9d052 100644
--- a/lib/plugin/screenshotOnFail.js
+++ b/lib/plugin/screenshotOnFail.js
@@ -5,8 +5,9 @@ const Container = require('../container')
const recorder = require('../recorder')
const event = require('../event')
const output = require('../output')
-const { fileExists, clearString } = require('../utils')
+const { fileExists } = require('../utils')
const Codeceptjs = require('../index')
+const { testToFileName } = require('../mocha/test')
const defaultConfig = {
uniqueScreenshotNames: false,
@@ -79,13 +80,10 @@ module.exports = function (config) {
recorder.add(
'screenshot of failed test',
async () => {
- let fileName = clearString(test.title)
const dataType = 'image/png'
// This prevents data driven to be included in the failed screenshot file name
- if (fileName.indexOf('{') !== -1) {
- fileName = fileName.substr(0, fileName.indexOf('{') - 3).trim()
- }
- if (test.ctx && test.ctx.test && test.ctx.test.type === 'hook') fileName = clearString(`${test.title}_${test.ctx.test.title}`)
+ let fileName = testToFileName(test)
+
if (options.uniqueScreenshotNames && test) {
const uuid = _getUUID(test)
fileName = `${fileName.substring(0, 10)}_${uuid}.failed.png`
diff --git a/lib/plugin/stepTimeout.js b/lib/plugin/stepTimeout.js
index 36f06d5c1..d512c6b0a 100644
--- a/lib/plugin/stepTimeout.js
+++ b/lib/plugin/stepTimeout.js
@@ -1,5 +1,5 @@
const event = require('../event')
-const { TIMEOUT_ORDER } = require('../step/timeout')
+const { TIMEOUT_ORDER } = require('../timeout')
const defaultConfig = {
timeout: 150,
diff --git a/lib/plugin/tryTo.js b/lib/plugin/tryTo.js
index 195cea28a..2eb77245c 100644
--- a/lib/plugin/tryTo.js
+++ b/lib/plugin/tryTo.js
@@ -1,4 +1,6 @@
-module.exports = function () {
+const { tryTo } = require('../effects')
+
+module.exports = function (config) {
console.log(`
Deprecated Warning: 'tryTo' has been moved to the effects module.
You should update your tests to use it as follows:
@@ -14,4 +16,10 @@ await tryTo(() => {
For more details, refer to the documentation.
`)
+
+ if (config.registerGlobal) {
+ global.tryTo = tryTo
+ }
+
+ return tryTo
}
diff --git a/lib/recorder.js b/lib/recorder.js
index fa60727d5..5f7dbd59b 100644
--- a/lib/recorder.js
+++ b/lib/recorder.js
@@ -3,7 +3,7 @@ const promiseRetry = require('promise-retry')
const chalk = require('chalk')
const { printObjectProperties } = require('./utils')
const { log } = require('./output')
-
+const { TimeoutError } = require('./timeout')
const MAX_TASKS = 100
let promise
@@ -191,13 +191,13 @@ module.exports = {
.slice(-1)
.pop()
// no retries or unnamed tasks
+ debug(`${currentQueue()} Running | ${taskName} | Timeout: ${timeout || 'None'}`)
+
if (!retryOpts || !taskName || !retry) {
const [promise, timer] = getTimeoutPromise(timeout, taskName)
return Promise.race([promise, Promise.resolve(res).then(fn)]).finally(() => clearTimeout(timer))
}
- debug(`${currentQueue()} Running | ${taskName}`)
-
const retryRules = this.retries.slice().reverse()
return promiseRetry(Object.assign(defaultRetryOptions, retryOpts), (retry, number) => {
if (number > 1) log(`${currentQueue()}Retrying... Attempt #${number}`)
@@ -386,7 +386,7 @@ function getTimeoutPromise(timeoutMs, taskName) {
return [
new Promise((done, reject) => {
timer = setTimeout(() => {
- reject(new Error(`Action ${taskName} was interrupted on step timeout ${timeoutMs}ms`))
+ reject(new TimeoutError(`Action ${taskName} was interrupted on timeout ${timeoutMs}ms`))
}, timeoutMs || 2e9)
}),
timer,
diff --git a/lib/rerun.js b/lib/rerun.js
index df4549209..a4f700ffe 100644
--- a/lib/rerun.js
+++ b/lib/rerun.js
@@ -1,81 +1,82 @@
-const fsPath = require('path');
-const container = require('./container');
-const event = require('./event');
-const BaseCodecept = require('./codecept');
-const output = require('./output');
+const fsPath = require('path')
+const container = require('./container')
+const event = require('./event')
+const BaseCodecept = require('./codecept')
+const output = require('./output')
class CodeceptRerunner extends BaseCodecept {
runOnce(test) {
return new Promise((resolve, reject) => {
// @ts-ignore
- container.createMocha();
- const mocha = container.mocha();
- this.testFiles.forEach((file) => {
- delete require.cache[file];
- });
- mocha.files = this.testFiles;
+ container.createMocha()
+ const mocha = container.mocha()
+ this.testFiles.forEach(file => {
+ delete require.cache[file]
+ })
+ mocha.files = this.testFiles
if (test) {
if (!fsPath.isAbsolute(test)) {
- test = fsPath.join(global.codecept_dir, test);
+ test = fsPath.join(global.codecept_dir, test)
}
- mocha.files = mocha.files.filter(t => fsPath.basename(t, '.js') === test || t === test);
+ mocha.files = mocha.files.filter(t => fsPath.basename(t, '.js') === test || t === test)
}
try {
- mocha.run((failures) => {
+ mocha.run(failures => {
if (failures === 0) {
- resolve();
+ resolve()
} else {
- reject(new Error(`${failures} tests fail`));
+ reject(new Error(`${failures} tests fail`))
}
- });
+ })
} catch (e) {
- reject(e);
+ reject(e)
}
- });
+ })
}
async runTests(test) {
- const configRerun = this.config.rerun || {};
- const minSuccess = configRerun.minSuccess || 1;
- const maxReruns = configRerun.maxReruns || 1;
+ const configRerun = this.config.rerun || {}
+ const minSuccess = configRerun.minSuccess || 1
+ const maxReruns = configRerun.maxReruns || 1
if (minSuccess > maxReruns) {
- process.exitCode = 1;
- throw new Error(`run-rerun Configuration Error: minSuccess must be less than maxReruns. Current values: minSuccess=${minSuccess} maxReruns=${maxReruns}`);
+ process.exitCode = 1
+ throw new Error(`run-rerun Configuration Error: minSuccess must be less than maxReruns. Current values: minSuccess=${minSuccess} maxReruns=${maxReruns}`)
}
if (maxReruns === 1) {
- await this.runOnce(test);
- return;
+ await this.runOnce(test)
+ return
}
- let successCounter = 0;
- let rerunsCounter = 0;
+ let successCounter = 0
+ let rerunsCounter = 0
while (rerunsCounter < maxReruns && successCounter < minSuccess) {
- rerunsCounter++;
+ container.result().reset() // reset result
+ rerunsCounter++
try {
- await this.runOnce(test);
- successCounter++;
- output.success(`\nProcess run ${rerunsCounter} of max ${maxReruns}, success runs ${successCounter}/${minSuccess}\n`);
+ await this.runOnce(test)
+ successCounter++
+ output.success(`\nProcess run ${rerunsCounter} of max ${maxReruns}, success runs ${successCounter}/${minSuccess}\n`)
} catch (e) {
- output.error(`\nFail run ${rerunsCounter} of max ${maxReruns}, success runs ${successCounter}/${minSuccess} \n`);
- console.error(e);
+ output.error(`\nFail run ${rerunsCounter} of max ${maxReruns}, success runs ${successCounter}/${minSuccess} \n`)
+ console.error(e)
}
}
if (successCounter < minSuccess) {
- throw new Error(`Flaky tests detected! ${successCounter} success runs achieved instead of ${minSuccess} success runs expected`);
+ throw new Error(`Flaky tests detected! ${successCounter} success runs achieved instead of ${minSuccess} success runs expected`)
}
}
async run(test) {
- event.emit(event.all.before, this);
+ event.emit(event.all.before, this)
try {
- await this.runTests(test);
+ await this.runTests(test)
} catch (e) {
- output.error(e.stack);
- throw e;
+ output.error(e.stack)
+ throw e
} finally {
- event.emit(event.all.result, this);
- event.emit(event.all.after, this);
+ event.emit(event.all.result, this)
+ event.emit(event.all.after, this)
}
}
}
-module.exports = CodeceptRerunner;
+module.exports = CodeceptRerunner
diff --git a/lib/result.js b/lib/result.js
new file mode 100644
index 000000000..9e562d8fc
--- /dev/null
+++ b/lib/result.js
@@ -0,0 +1,161 @@
+const fs = require('fs')
+const path = require('path')
+const { serializeTest } = require('./mocha/test')
+
+/**
+ * Result of the test run
+ *
+ * @typedef {Object} Stats
+ * @property {number} passes
+ * @property {number} failures
+ * @property {number} tests
+ * @property {number} pending
+ * @property {number} failedHooks
+ * @property {Date} start
+ * @property {Date} end
+ * @property {number} duration
+ */
+class Result {
+ /**
+ * Create Result of the test run
+ */
+ constructor() {
+ this._startTime = new Date()
+ this._endTime = null
+
+ this.reset()
+ this.start()
+ }
+
+ reset() {
+ this._stats = {
+ passes: 0,
+ failures: 0,
+ tests: 0,
+ pending: 0,
+ failedHooks: 0,
+ start: null,
+ end: null,
+ duration: 0,
+ }
+
+ /** @type {CodeceptJS.Test[]} */
+ this._tests = []
+
+ /** @type {String[]} */
+ this._failures = []
+ }
+
+ start() {
+ this._startTime = new Date()
+ }
+
+ finish() {
+ this._endTime = new Date()
+ }
+
+ get hasFailed() {
+ return this._stats.failures > 0
+ }
+
+ get tests() {
+ return this._tests
+ }
+
+ get failures() {
+ return this._failures.filter(f => f && (!Array.isArray(f) || f.length > 0))
+ }
+
+ get stats() {
+ return this._stats
+ }
+
+ get startTime() {
+ return this._startTime
+ }
+
+ /**
+ * Add test to result
+ *
+ * @param {CodeceptJS.Test} test
+ */
+ addTest(test) {
+ const existingTestIndex = this._tests.findIndex(t => !!t.uid && t.uid === test.uid)
+ if (existingTestIndex >= 0) {
+ this._tests[existingTestIndex] = test
+ return
+ }
+
+ this._tests.push(test)
+ }
+
+ /**
+ * Add failures to result
+ *
+ * @param {String[]} newFailures
+ */
+ addFailures(newFailures) {
+ this._failures.push(...newFailures)
+ }
+
+ get hasFailures() {
+ return this.stats.failures > 0
+ }
+
+ get duration() {
+ return this._endTime ? +this._endTime - +this._startTime : 0
+ }
+
+ get failedTests() {
+ return this._tests.filter(test => test.state === 'failed')
+ }
+
+ get passedTests() {
+ return this._tests.filter(test => test.state === 'passed')
+ }
+
+ get skippedTests() {
+ return this._tests.filter(test => test.state === 'skipped' || test.state === 'pending')
+ }
+
+ simplify() {
+ return {
+ hasFailed: this.hasFailed,
+ stats: this.stats,
+ duration: this.duration,
+ tests: this._tests.map(test => serializeTest(test)),
+ failures: this._failures,
+ }
+ }
+
+ /**
+ * Save result to json file
+ *
+ * @param {string} fileName
+ */
+ save(fileName) {
+ if (!fileName) fileName = 'result.json'
+ fs.writeFileSync(path.join(global.output_dir, fileName), JSON.stringify(this.simplify(), null, 2))
+ }
+
+ /**
+ * Add stats to result
+ *
+ * @param {object} newStats
+ */
+ addStats(newStats = {}) {
+ this._stats.passes += newStats.passes || 0
+ this._stats.failures += newStats.failures || 0
+ this._stats.tests += newStats.tests || 0
+ this._stats.pending += newStats.pending || 0
+ this._stats.failedHooks += newStats.failedHooks || 0
+
+ // do not override start time
+ this._stats.start = this._stats.start || newStats.start
+
+ this._stats.end = newStats.end || this._stats.end
+ this._stats.duration = newStats.duration
+ }
+}
+
+module.exports = Result
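A minimal consumer sketch (illustrative, assuming a plugin under `lib/` as in `customReporter` above): `event.all.result` now carries this `Result` instance, so reporters can inspect stats and persist the run.

```js
const event = require('../event')

module.exports = function () {
  event.dispatcher.on(event.all.result, result => {
    console.log(`passed: ${result.passedTests.length}, failed: ${result.failedTests.length}`)
    if (result.hasFailed) result.save('failed-run.json') // written as JSON into global.output_dir
  })
}
```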
diff --git a/lib/step/base.js b/lib/step/base.js
index 94afc675e..27ea059dd 100644
--- a/lib/step/base.js
+++ b/lib/step/base.js
@@ -1,7 +1,7 @@
const color = require('chalk')
const Secret = require('../secret')
-const { getCurrentTimeout } = require('./timeout')
-const { ucfirst, humanizeString } = require('../utils')
+const { getCurrentTimeout } = require('../timeout')
+const { ucfirst, humanizeString, serializeError } = require('../utils')
const STACK_LINE = 5
@@ -37,6 +37,9 @@ class Step {
/** @member {string} */
this.stack = ''
+ this.startTime = 0
+ this.endTime = 0
+
this.setTrace()
}
@@ -159,6 +162,51 @@ class Step {
return this.constructor.name === 'MetaStep'
}
+ get duration() {
+ if (!this.startTime || !this.endTime) return 0
+ return this.endTime - this.startTime
+ }
+
+ simplify() {
+ const step = this
+
+ const parent = {}
+ if (step.metaStep) {
+ parent.title = step.metaStep.actor
+ }
+
+ if (step.opts) {
+ Object.keys(step.opts).forEach(k => {
+ if (typeof step.opts[k] === 'object') delete step.opts[k]
+ if (typeof step.opts[k] === 'function') delete step.opts[k]
+ })
+ }
+
+ const args = []
+ if (step.args) {
+ for (const arg of step.args) {
+ // check if arg is a JOI object
+ if (arg && typeof arg === 'function') {
+ args.push(arg.name)
+ } else if (typeof arg == 'string') {
+ args.push(arg)
+ } else {
+ args.push(JSON.stringify(arg).slice(0, 300))
+ }
+ }
+ }
+
+ return {
+ opts: step.opts || {},
+ title: step.name,
+ args: args,
+ status: step.status,
+ startTime: step.startTime,
+ endTime: step.endTime,
+ parent,
+ }
+ }
+
/** @return {boolean} */
hasBDDAncestor() {
let hasBDD = false
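A sketch of the new serialization surface of a step (illustrative; the base constructor is assumed to take the step name, as `deserializeTest` above does when rebuilding steps).

```js
const Step = require('./step/base')

// build a bare step the same way deserializeTest() does, then fill in recorded fields
const step = Object.assign(new Step('click'), {
  name: 'click',
  args: ['Submit'],
  status: 'success',
  startTime: 1700000000000,
  endTime: 1700000000150,
})

console.log(step.duration)   // 150 (ms), derived from startTime/endTime
console.log(step.simplify()) // { opts: {...}, title: 'click', args: ['Submit'], status: 'success', ... }
```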
diff --git a/lib/step/helper.js b/lib/step/helper.js
index b52470e3c..ade2a0d3d 100644
--- a/lib/step/helper.js
+++ b/lib/step/helper.js
@@ -16,6 +16,7 @@ class HelperStep extends Step {
*/
run() {
this.args = Array.prototype.slice.call(arguments)
+ this.startTime = +Date.now()
if (store.dryRun) {
this.setStatus('success')
@@ -27,7 +28,9 @@ class HelperStep extends Step {
result = this.helper[this.helperMethod].apply(this.helper, this.args)
}
this.setStatus('success')
+ this.endTime = +Date.now()
} catch (err) {
+ this.endTime = +Date.now()
this.setStatus('failed')
throw err
}
diff --git a/lib/step/record.js b/lib/step/record.js
index 40922b401..c29908adf 100644
--- a/lib/step/record.js
+++ b/lib/step/record.js
@@ -3,7 +3,7 @@ const recorder = require('../recorder')
const StepConfig = require('./config')
const { debug } = require('../output')
const store = require('../store')
-const { TIMEOUT_ORDER } = require('./timeout')
+const { TIMEOUT_ORDER } = require('../timeout')
const retryStep = require('./retry')
function recordStep(step, args) {
step.status = 'queued'
@@ -40,7 +40,7 @@ function recordStep(step, args) {
if (!step.startTime) {
// step can be retries
event.emit(event.step.started, step)
- step.startTime = Date.now()
+ step.startTime = +Date.now()
}
return (val = step.run(...args))
},
@@ -52,15 +52,15 @@ function recordStep(step, args) {
event.emit(event.step.after, step)
recorder.add('step passed', () => {
- step.endTime = Date.now()
+ step.endTime = +Date.now()
event.emit(event.step.passed, step, val)
event.emit(event.step.finished, step)
})
recorder.catchWithoutStop(err => {
step.status = 'failed'
- step.endTime = Date.now()
- event.emit(event.step.failed, step)
+ step.endTime = +Date.now()
+ event.emit(event.step.failed, step, err)
event.emit(event.step.finished, step)
throw err
})
diff --git a/lib/step/timeout.js b/lib/timeout.js
similarity index 74%
rename from lib/step/timeout.js
rename to lib/timeout.js
index 876644d41..ba9ba43b8 100644
--- a/lib/step/timeout.js
+++ b/lib/timeout.js
@@ -36,7 +36,31 @@ function getCurrentTimeout(timeouts) {
return totalTimeout
}
+class TimeoutError extends Error {
+ constructor(message) {
+ super(message)
+ this.name = 'TimeoutError'
+ }
+}
+
+class TestTimeoutError extends TimeoutError {
+ constructor(timeout) {
+ super(`Timeout ${timeout}s exceeded (with Before hook)`)
+ this.name = 'TestTimeoutError'
+ }
+}
+
+class StepTimeoutError extends TimeoutError {
+ constructor(timeout, step) {
+ super(`Step ${step.toCode().trim()} timed out after ${timeout}s`)
+ this.name = 'StepTimeoutError'
+ }
+}
+
module.exports = {
TIMEOUT_ORDER,
getCurrentTimeout,
+ TimeoutError,
+ TestTimeoutError,
+ StepTimeoutError,
}
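A small sketch of what the new error hierarchy enables (paths assume `lib/`): callers can distinguish timeout interruptions from ordinary failures with `instanceof` instead of matching message strings.

```js
const { TimeoutError, TestTimeoutError, StepTimeoutError } = require('./timeout')

function describeFailure(err) {
  if (err instanceof StepTimeoutError) return 'a single step exceeded its limit'
  if (err instanceof TestTimeoutError) return 'the whole test (with Before hook) timed out'
  if (err instanceof TimeoutError) return 'a recorder task was interrupted on timeout'
  return 'a regular failure'
}

try {
  throw new TestTimeoutError(2)
} catch (err) {
  console.log(err.name, '-', describeFailure(err)) // TestTimeoutError - the whole test (with Before hook) timed out
}
```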
diff --git a/lib/utils.js b/lib/utils.js
index 2aac45685..3696678d9 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -1,6 +1,7 @@
const fs = require('fs')
const os = require('os')
const path = require('path')
+const chalk = require('chalk')
const getFunctionArguments = require('fn-args')
const deepClone = require('lodash.clonedeep')
const { convertColorToRGBA, isColorProperty } = require('./colorUtils')
@@ -542,3 +543,37 @@ module.exports.humanizeString = function (string) {
_result[0] = _result[0] === 'i' ? this.ucfirst(_result[0]) : _result[0]
return _result.join(' ').trim()
}
+
+module.exports.serializeError = function (error) {
+ if (error) {
+ const { stack, uncaught, message, actual, expected } = error
+ return { stack, uncaught, message, actual, expected }
+ }
+ return null
+}
+
+module.exports.base64EncodeFile = function (filePath) {
+ return Buffer.from(fs.readFileSync(filePath)).toString('base64')
+}
+
+module.exports.markdownToAnsi = function (markdown) {
+ return (
+ markdown
+ // Headers (# Text) - make blue and bold
+ .replace(/^(#{1,6})\s+(.+)$/gm, (_, hashes, text) => {
+ return chalk.bold.blue(`${hashes} ${text}`)
+ })
+ // Bullet points - replace with yellow bullet character
+ .replace(/^[-*]\s+(.+)$/gm, (_, text) => {
+ return `${chalk.yellow('•')} ${text}`
+ })
+ // Bold (**text**) - make bold
+ .replace(/\*\*(.+?)\*\*/g, (_, text) => {
+ return chalk.bold(text)
+ })
+ // Italic (*text*) - make italic (dim in terminals)
+ .replace(/\*(.+?)\*/g, (_, text) => {
+ return chalk.italic(text)
+ })
+ )
+}
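Both helpers are small and self-contained; a sketch of their behavior (paths assume `lib/`):

```js
const { markdownToAnsi, serializeError } = require('./utils')

// markdownToAnsi colorizes headers, bullets, **bold** and *italic* for terminal output
console.log(markdownToAnsi('## Network errors\n- **2 tests** affected, see *pageInfo* artifacts'))

// serializeError keeps only plain fields, so errors survive worker transport and JSON.stringify
console.log(serializeError(new Error('element "#submit" not found')))
// => { stack: '...', uncaught: undefined, message: 'element "#submit" not found', actual: undefined, expected: undefined }
```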
diff --git a/lib/workers.js b/lib/workers.js
index c66aa2c40..1576263b3 100644
--- a/lib/workers.js
+++ b/lib/workers.js
@@ -12,7 +12,8 @@ const { replaceValueDeep, deepClone } = require('./utils')
const mainConfig = require('./config')
const output = require('./output')
const event = require('./event')
-const { repackTestForWorkersTransport: repackTest } = require('./mocha/test')
+const { deserializeTest } = require('./mocha/test')
+const { deserializeSuite } = require('./mocha/suite')
const recorder = require('./recorder')
const runHook = require('./hooks')
const WorkerStorage = require('./workerStorage')
@@ -230,17 +231,10 @@ class Workers extends EventEmitter {
super()
this.setMaxListeners(50)
this.codecept = initializeCodecept(config.testConfig, config.options)
- this.failuresLog = []
this.errors = []
this.numberOfWorkers = 0
this.closedWorkers = 0
this.workers = []
- this.stats = {
- passes: 0,
- failures: 0,
- tests: 0,
- pending: 0,
- }
this.testGroups = []
createOutputDir(config.testConfig)
@@ -353,8 +347,6 @@ class Workers extends EventEmitter {
}
run() {
- this.stats.start = new Date()
- this.stats.failedHooks = 0
recorder.startUnlessRunning()
event.dispatcher.emit(event.workers.before)
process.env.RUNS_WITH_WORKERS = 'true'
@@ -380,7 +372,7 @@ class Workers extends EventEmitter {
* @returns {Boolean}
*/
isFailed() {
- return (this.stats.failures || this.errors.length) > 0
+ return (Container.result().failures.length || this.errors.length) > 0
}
_listenWorkerEvents(worker) {
@@ -393,33 +385,37 @@ class Workers extends EventEmitter {
}
switch (message.event) {
- case event.all.failures:
- this.failuresLog = this.failuresLog.concat(message.data.failuresLog)
- this._appendStats(message.data.stats)
+ case event.all.result:
+ // we ensure consistency of the result by adding tests at the very end
+ Container.result().addFailures(message.data.failures)
+ Container.result().addStats(message.data.stats)
+ message.data.tests.forEach(test => {
+ Container.result().addTest(deserializeTest(test))
+ })
break
case event.suite.before:
- this.emit(event.suite.before, repackTest(message.data))
+ this.emit(event.suite.before, deserializeSuite(message.data))
break
case event.test.before:
- this.emit(event.test.before, repackTest(message.data))
+ this.emit(event.test.before, deserializeTest(message.data))
break
case event.test.started:
- this.emit(event.test.started, repackTest(message.data))
+ this.emit(event.test.started, deserializeTest(message.data))
break
case event.test.failed:
- this.emit(event.test.failed, repackTest(message.data))
+ this.emit(event.test.failed, deserializeTest(message.data))
break
case event.test.passed:
- this.emit(event.test.passed, repackTest(message.data))
+ this.emit(event.test.passed, deserializeTest(message.data))
break
case event.test.skipped:
- this.emit(event.test.skipped, repackTest(message.data))
+ this.emit(event.test.skipped, deserializeTest(message.data))
break
case event.test.finished:
- this.emit(event.test.finished, repackTest(message.data))
+ this.emit(event.test.finished, deserializeTest(message.data))
break
case event.test.after:
- this.emit(event.test.after, repackTest(message.data))
+ this.emit(event.test.after, deserializeTest(message.data))
break
case event.step.finished:
this.emit(event.step.finished, message.data)
@@ -431,7 +427,7 @@ class Workers extends EventEmitter {
this.emit(event.step.passed, message.data)
break
case event.step.failed:
- this.emit(event.step.failed, message.data)
+ this.emit(event.step.failed, message.data, message.data.error)
break
}
})
@@ -450,33 +446,26 @@ class Workers extends EventEmitter {
_finishRun() {
event.dispatcher.emit(event.workers.after, { tests: this.workers.map(worker => worker.tests) })
- if (this.isFailed()) {
+ if (Container.result().hasFailed) {
process.exitCode = 1
} else {
process.exitCode = 0
}
- // removed this.finishedTests because in all /lib only first argument (!this.isFailed()) is used)
- this.emit(event.all.result, !this.isFailed())
- this.emit('end') // internal event
- }
- _appendStats(newStats) {
- this.stats.passes += newStats.passes
- this.stats.failures += newStats.failures
- this.stats.tests += newStats.tests
- this.stats.pending += newStats.pending
- this.stats.failedHooks += newStats.failedHooks
+ this.emit(event.all.result, Container.result())
+ event.dispatcher.emit(event.workers.result, Container.result())
+ this.emit('end') // internal event
}
printResults() {
- this.stats.end = new Date()
- this.stats.duration = this.stats.end - this.stats.start
+ const result = Container.result()
+ result.finish()
// Reset process for logs in main thread
output.process(null)
output.print()
- this.failuresLog = this.failuresLog
+ this.failuresLog = result.failures
.filter(log => log.length && typeof log[1] === 'number')
// mocha/lib/reporters/base.js
.map(([format, num, title, message, stack], i) => [format, i + 1, title, message, stack])
@@ -487,7 +476,8 @@ class Workers extends EventEmitter {
this.failuresLog.forEach(log => output.print(...log))
}
- output.result(this.stats.passes, this.stats.failures, this.stats.pending, ms(this.stats.duration), this.stats.failedHooks)
+ output.result(result.stats.passes, result.stats.failures, result.stats.pending, ms(result.duration), result.stats.failedHooks)
+
process.env.RUNS_WITH_WORKERS = 'false'
}
}
diff --git a/test/data/sandbox/configs/custom-reporter-plugin/codecept.conf.js b/test/data/sandbox/configs/custom-reporter-plugin/codecept.conf.js
new file mode 100644
index 000000000..536db958b
--- /dev/null
+++ b/test/data/sandbox/configs/custom-reporter-plugin/codecept.conf.js
@@ -0,0 +1,44 @@
+exports.config = {
+ tests: './*_test.js',
+ output: './output',
+ helpers: {
+ FileSystem: {},
+ },
+ include: {},
+ bootstrap: false,
+ plugins: {
+ customReporter: {
+ enabled: true,
+ onHookFinished: hook => {
+ console.log(`Hook Finished: ${hook.title}`)
+ },
+ onTestBefore: test => {
+ console.log(`Test Started: ${test.title}`)
+ },
+ onTestPassed: test => {
+ console.log(`Test Passed: ${test.title}`)
+ },
+ onTestFailed: (test, err) => {
+ console.log(`Test Failed: ${test.title}`)
+ console.log(`Error: ${err.message}`)
+ },
+ onTestSkipped: test => {
+ console.log(`Test Skipped: ${test.title}`)
+ },
+ onTestFinished: test => {
+ console.log(`Test Finished: ${test.title}`)
+ console.log(`Test Status: ${test.state}`)
+ console.log(`Test Error: ${test.err}`)
+ },
+ onResult: result => {
+ console.log('All tests completed')
+ console.log(`Total: ${result.stats.tests}`)
+ console.log(`Passed: ${result.stats.passes}`)
+ console.log(`Failed: ${result.stats.failures}`)
+ },
+ save: true,
+ },
+ },
+ mocha: {},
+ name: 'custom-reporter-plugin tests',
+}
diff --git a/test/data/sandbox/configs/custom-reporter-plugin/custom-reporter-plugin_test.js b/test/data/sandbox/configs/custom-reporter-plugin/custom-reporter-plugin_test.js
new file mode 100644
index 000000000..902c9786a
--- /dev/null
+++ b/test/data/sandbox/configs/custom-reporter-plugin/custom-reporter-plugin_test.js
@@ -0,0 +1,22 @@
+Feature('custom-reporter-plugin')
+
+BeforeSuite(({ I }) => {
+ I.say('I print before suite hook')
+})
+
+Before(({ I }) => {
+ I.say('I print before hook')
+})
+
+Scenario('test custom-reporter-plugin', ({ I }) => {
+ I.amInPath('.')
+ I.seeFile('this-file-should-not-exist.txt')
+})
+
+After(({ I }) => {
+ I.say('I print after hook')
+})
+
+AfterSuite(({ I }) => {
+ I.say('I print after suite hook')
+})
diff --git a/test/data/sandbox/configs/timeouts/suite_test.js b/test/data/sandbox/configs/timeouts/suite_test.js
index f53f2bfb7..07927bb8f 100644
--- a/test/data/sandbox/configs/timeouts/suite_test.js
+++ b/test/data/sandbox/configs/timeouts/suite_test.js
@@ -10,7 +10,7 @@ Scenario('timeout test in 0.5 #second', { timeout: 0.5 }, ({ I }) => {
I.waitForSleep(1000)
})
-Scenario('timeout step in 0.5', ({ I }) => {
+Scenario('timeout step in 0.5 old syntax', ({ I }) => {
I.limitTime(0.2).waitForSleep(100)
I.limitTime(0.2).waitForSleep(3000)
})
diff --git a/test/runner/before_failure_test.js b/test/runner/before_failure_test.js
index 56274a5da..6b6169d79 100644
--- a/test/runner/before_failure_test.js
+++ b/test/runner/before_failure_test.js
@@ -1,5 +1,6 @@
const path = require('path')
const exec = require('child_process').exec
+const debug = require('debug')('codeceptjs:test')
const runner = path.join(__dirname, '/../../bin/codecept.js')
const codecept_dir = path.join(__dirname, '/../data/sandbox')
@@ -9,6 +10,7 @@ describe('Failure in before', function () {
this.timeout(40000)
it('should skip tests that are skipped because of failure in before hook', done => {
exec(`${codecept_run}`, (err, stdout) => {
+ debug(stdout)
stdout.should.include('First test will be passed @grep')
stdout.should.include('Third test will be skipped @grep')
stdout.should.include('Fourth test will be skipped')
@@ -20,6 +22,7 @@ describe('Failure in before', function () {
it('should skip tests correctly with grep options', done => {
exec(`${codecept_run} --grep @grep`, (err, stdout) => {
+ debug(stdout)
stdout.should.include('First test will be passed @grep')
stdout.should.include('Third test will be skipped @grep')
stdout.should.include('1 passed, 1 failed, 1 failedHooks, 1 skipped')
diff --git a/test/runner/custom-reporter-plugin_test.js b/test/runner/custom-reporter-plugin_test.js
new file mode 100644
index 000000000..af475c1b3
--- /dev/null
+++ b/test/runner/custom-reporter-plugin_test.js
@@ -0,0 +1,41 @@
+const { expect } = require('expect')
+const exec = require('child_process').exec
+const { codecept_dir, codecept_run } = require('./consts')
+const debug = require('debug')('codeceptjs:tests')
+const fs = require('fs')
+const path = require('path')
+
+const config_run_config = (config, grep, verbose = false) => `${codecept_run} ${verbose ? '--verbose' : ''} --config ${codecept_dir}/configs/custom-reporter-plugin/${config} ${grep ? `--grep "${grep}"` : ''}`
+
+describe('CodeceptJS custom-reporter-plugin', function () {
+ this.timeout(10000)
+
+ it('should run custom-reporter-plugin test', done => {
+ exec(config_run_config('codecept.conf.js'), (err, stdout) => {
+ debug(stdout)
+
+ // Check for custom reporter output messages
+ expect(stdout).toContain('Hook Finished:')
+ expect(stdout).toContain('Test Started:')
+ expect(stdout).toContain('Test Failed:')
+ expect(stdout).toContain('Test Finished:')
+ expect(stdout).toContain('All tests completed')
+ expect(stdout).toContain('Total:')
+ expect(stdout).toContain('Passed:')
+
+ // Check if result file exists and has content
+ const resultFile = path.join(`${codecept_dir}/configs/custom-reporter-plugin`, 'output', 'result.json')
+ expect(fs.existsSync(resultFile)).toBe(true)
+
+ const resultContent = JSON.parse(fs.readFileSync(resultFile, 'utf8'))
+ expect(resultContent).toBeTruthy()
+ expect(resultContent).toHaveProperty('stats')
+ expect(resultContent.stats).toHaveProperty('tests')
+ expect(resultContent.stats).toHaveProperty('passes')
+ expect(resultContent.stats).toHaveProperty('failures')
+
+ expect(err).toBeTruthy()
+ done()
+ })
+ })
+})
diff --git a/test/runner/interface_test.js b/test/runner/interface_test.js
index daa40b342..72d8023bc 100644
--- a/test/runner/interface_test.js
+++ b/test/runner/interface_test.js
@@ -156,7 +156,6 @@ describe('CodeceptJS Interface', () => {
expect(output).toContain('OK')
expect(output).toContain('0 passed')
expect(output).toContain('2 skipped')
- console.log(err)
if (process.env.CI) {
// we notify that no tests were executed, which is not expected on CI
expect(err).toBeTruthy()
diff --git a/test/runner/run_workers_test.js b/test/runner/run_workers_test.js
index 87790c570..e8490fc1f 100644
--- a/test/runner/run_workers_test.js
+++ b/test/runner/run_workers_test.js
@@ -107,15 +107,13 @@ describe('CodeceptJS Workers Runner', function () {
exec(`${codecept_run} 2 --grep "Workers Failing"`, (err, stdout) => {
expect(stdout).toContain('CodeceptJS') // feature
expect(stdout).toContain('Running tests in 2 workers')
- // Test Scenario wasn't executed, but we can see it in logs because Before() hook was executed
- expect(stdout).not.toContain(' should not be executed ')
expect(stdout).toContain('"before each" hook: Before for "should not be executed"')
expect(stdout).not.toContain('this is running inside worker')
expect(stdout).toContain('failed')
expect(stdout).toContain('FAILURES')
expect(stdout).toContain('Workers Failing')
// Only 1 test is executed - Before hook in Workers Failing
- expect(stdout).toContain('✖ Workers Failing')
+ expect(stdout).toContain('✖ should not be executed')
expect(stdout).toContain('FAIL | 0 passed, 1 failed')
expect(err.code).toEqual(1)
done()
diff --git a/test/runner/step-enhancements_test.js b/test/runner/step-enhancements_test.js
index b988562e4..c68990f13 100644
--- a/test/runner/step-enhancements_test.js
+++ b/test/runner/step-enhancements_test.js
@@ -26,7 +26,7 @@ describe('CodeceptJS step-enhancements', function () {
debug(stdout)
expect(err).toBeTruthy()
expect(stdout).not.toContain('OK')
- expect(stdout).toContain('was interrupted on step timeout 100ms')
+ expect(stdout).toContain('was interrupted on timeout 100ms')
done()
})
})
diff --git a/test/runner/step_timeout_test.js b/test/runner/step_timeout_test.js
index e055c7ffc..7cd216aca 100644
--- a/test/runner/step_timeout_test.js
+++ b/test/runner/step_timeout_test.js
@@ -11,7 +11,7 @@ describe('CodeceptJS Steps', function () {
it('should stop test, when step timeout exceeded', done => {
exec(config_run_config('codecept-1000.conf.js', 'Default command timeout'), (err, stdout) => {
- expect(stdout).toContain('Action exceededByTimeout: 1500 was interrupted on step timeout 1000ms')
+ expect(stdout).toContain('Action exceededByTimeout: 1500 was interrupted on timeout 1000ms')
expect(stdout).toContain('0 passed, 1 failed')
expect(stdout).toContain(figures.cross + ' I.exceededByTimeout(1500)')
expect(err).toBeTruthy()
@@ -21,7 +21,7 @@ describe('CodeceptJS Steps', function () {
it('should respect custom timeout with regex', done => {
exec(config_run_config('codecept-1000.conf.js', 'Wait with longer timeout', debug_this_test), (err, stdout) => {
- expect(stdout).not.toContain('was interrupted on step timeout')
+ expect(stdout).not.toContain('was interrupted on timeout')
expect(stdout).toContain('1 passed')
expect(err).toBeFalsy()
done()
@@ -30,7 +30,7 @@ describe('CodeceptJS Steps', function () {
it('should respect custom timeout with full step name', done => {
exec(config_run_config('codecept-1000.conf.js', 'Wait with shorter timeout', debug_this_test), (err, stdout) => {
- expect(stdout).toContain('Action waitTadShorter: 750 was interrupted on step timeout 500ms')
+ expect(stdout).toContain('Action waitTadShorter: 750 was interrupted on timeout 500ms')
expect(stdout).toContain('0 passed, 1 failed')
expect(err).toBeTruthy()
done()
@@ -39,7 +39,7 @@ describe('CodeceptJS Steps', function () {
it('should not stop test, when step not exceeded', done => {
exec(config_run_config('codecept-2000.conf.js', 'Default command timeout'), (err, stdout) => {
- expect(stdout).not.toContain('was interrupted on step timeout')
+ expect(stdout).not.toContain('was interrupted on timeout')
expect(stdout).toContain('1 passed')
expect(err).toBeFalsy()
done()
@@ -48,7 +48,7 @@ describe('CodeceptJS Steps', function () {
it('should ignore timeout for steps with `wait*` prefix', done => {
exec(config_run_config('codecept-1000.conf.js', 'Wait command timeout'), (err, stdout) => {
- expect(stdout).not.toContain('was interrupted on step timeout')
+ expect(stdout).not.toContain('was interrupted on timeout')
expect(stdout).toContain('1 passed')
expect(err).toBeFalsy()
done()
@@ -57,7 +57,7 @@ describe('CodeceptJS Steps', function () {
it('step timeout should work nicely with step retries', done => {
exec(config_run_config('codecept-1000.conf.js', 'Rerun sleep'), (err, stdout) => {
- expect(stdout).not.toContain('was interrupted on step timeout')
+ expect(stdout).not.toContain('was interrupted on timeout')
expect(stdout).toContain('1 passed')
expect(err).toBeFalsy()
done()
diff --git a/test/runner/timeout_test.js b/test/runner/timeout_test.js
index a3c1ffefe..fc4fb3768 100644
--- a/test/runner/timeout_test.js
+++ b/test/runner/timeout_test.js
@@ -9,6 +9,9 @@ const config_run_config = (config, grep, verbose = false) => `${codecept_run} ${
describe('CodeceptJS Timeouts', function () {
this.timeout(10000)
+ // sometimes messages are different
+ this.retries(2)
+
it('should stop test when timeout exceeded', done => {
exec(config_run_config('codecept.conf.js', 'timed out'), (err, stdout) => {
debug_this_test && console.log(stdout)
@@ -33,7 +36,8 @@ describe('CodeceptJS Timeouts', function () {
it('should ignore timeouts if no timeout', done => {
exec(config_run_config('codecept.conf.js', 'no timeout test'), (err, stdout) => {
debug_this_test && console.log(stdout)
- expect(stdout).not.toContain('Timeout')
+ expect(stdout).not.toContain('TimeoutError')
+ expect(stdout).not.toContain('was interrupted on')
expect(err).toBeFalsy()
done()
})
@@ -52,7 +56,7 @@ describe('CodeceptJS Timeouts', function () {
it('should prefer step timeout', done => {
exec(config_run_config('codecept.conf.js', 'timeout step', true), (err, stdout) => {
debug_this_test && console.log(stdout)
- expect(stdout).toContain('was interrupted on step timeout 200ms')
+ expect(stdout).toContain('was interrupted on timeout 200ms')
expect(err).toBeTruthy()
done()
})
@@ -61,7 +65,7 @@ describe('CodeceptJS Timeouts', function () {
it('should keep timeout with steps', done => {
exec(config_run_config('codecept.timeout.conf.js', 'timeout step', true), (err, stdout) => {
debug_this_test && console.log(stdout)
- expect(stdout).toContain('was interrupted on step timeout 100ms')
+ expect(stdout).toContain('was interrupted on timeout 100ms')
expect(err).toBeTruthy()
done()
})
diff --git a/test/unit/plugin/retryFailedStep_test.js b/test/unit/plugin/retryFailedStep_test.js
index 2a13e2212..e8012868e 100644
--- a/test/unit/plugin/retryFailedStep_test.js
+++ b/test/unit/plugin/retryFailedStep_test.js
@@ -46,31 +46,6 @@ describe('retryFailedStep', () => {
return recorder.promise()
})
- it('should not retry failed step when tryTo plugin is enabled', async () => {
- tryTo()
- retryFailedStep({ retries: 2, minTimeout: 1 })
- event.dispatcher.emit(event.test.before, {})
- event.dispatcher.emit(event.step.started, { name: 'click' })
-
- try {
- let counter = 0
- await recorder.add(
- () => {
- counter++
- if (counter < 3) {
- throw new Error('Retry failed step is disabled when tryTo plugin is enabled')
- }
- },
- undefined,
- undefined,
- true,
- )
- return recorder.promise()
- } catch (e) {
- expect(e.message).equal('Retry failed step is disabled when tryTo plugin is enabled')
- }
- })
-
it('should not retry within', async () => {
retryFailedStep({ retries: 1, minTimeout: 1 })
event.dispatcher.emit(event.test.before, {})
diff --git a/test/unit/worker_test.js b/test/unit/worker_test.js
index f1cde53a6..811eeae87 100644
--- a/test/unit/worker_test.js
+++ b/test/unit/worker_test.js
@@ -28,8 +28,8 @@ describe('Workers', function () {
workers.run()
- workers.on(event.all.result, status => {
- expect(status).equal(false)
+ workers.on(event.all.result, result => {
+ expect(result.hasFailed).equal(true)
expect(passedCount).equal(5)
expect(failedCount).equal(3)
done()
@@ -63,9 +63,9 @@ describe('Workers', function () {
workers.run()
- workers.on(event.all.result, status => {
+ workers.on(event.all.result, result => {
expect(workers.getWorkers().length).equal(2)
- expect(status).equal(true)
+ expect(result.hasFailed).equal(false)
done()
})
})
@@ -100,8 +100,8 @@ describe('Workers', function () {
passedCount += 1
})
- workers.on(event.all.result, status => {
- expect(status).equal(false)
+ workers.on(event.all.result, result => {
+ expect(result.hasFailed).equal(true)
expect(passedCount).equal(3)
expect(failedCount).equal(2)
done()
@@ -135,9 +135,9 @@ describe('Workers', function () {
workers.run()
- workers.on(event.all.result, status => {
+ workers.on(event.all.result, result => {
expect(workers.getWorkers().length).equal(2)
- expect(status).equal(true)
+ expect(result.hasFailed).equal(false)
done()
})
})
@@ -170,9 +170,9 @@ describe('Workers', function () {
workers.run()
- workers.on(event.all.result, status => {
+ workers.on(event.all.result, result => {
expect(workers.getWorkers().length).equal(2)
- expect(status).equal(true)
+ expect(result.hasFailed).equal(false)
done()
})
})
@@ -199,8 +199,8 @@ describe('Workers', function () {
workers.run()
recorder.add(() => share({ fromMain: true }))
- workers.on(event.all.result, status => {
- expect(status).equal(true)
+ workers.on(event.all.result, result => {
+ expect(result.hasFailed).equal(false)
done()
})
})
@@ -258,9 +258,9 @@ describe('Workers', function () {
workers.run()
- workers.on(event.all.result, status => {
+ workers.on(event.all.result, result => {
expect(workers.getWorkers().length).equal(8)
- expect(status).equal(true)
+ expect(result.hasFailed).equal(false)
done()
})
})
diff --git a/typings/jsdoc.conf.js b/typings/jsdoc.conf.js
index ca1d115bf..6b38e805f 100644
--- a/typings/jsdoc.conf.js
+++ b/typings/jsdoc.conf.js
@@ -5,6 +5,7 @@ module.exports = {
'./lib/actor.js',
'./lib/codecept.js',
'./lib/config.js',
+ './lib/result.js',
'./lib/container.js',
'./lib/data/table.js',
'./lib/data/dataTableArgument.js',