Skip to content
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/advanced/api/test-case.md
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ Checks if the test did not fail the suite. If the test is not finished yet or wa
function meta(): TaskMeta
```

Custom metadata that was attached to the test during its execution. The meta can be attached by assigning a property to the `ctx.task.meta` object during a test run:
Custom [metadata](/advanced/metadata) that was attached to the test during its execution. The meta can be attached by assigning a property to the `ctx.task.meta` object during a test run:

```ts {3,6}
import { test } from 'vitest'
Expand Down
32 changes: 32 additions & 0 deletions docs/advanced/api/test-module.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,33 @@ function state(): TestModuleState

Works the same way as [`testSuite.state()`](/advanced/api/test-suite#state), but can also return `queued` if the module wasn't executed yet.

## meta <Version>3.1.0</Version> {#meta}

```ts
function meta(): TaskMeta
```

Custom [metadata](/advanced/metadata) that was attached to the module during its execution or collection. The meta can be attached by assigning a property to the `task.meta` object during a test run:

```ts {5,10}
import { describe, test } from 'vitest'

describe('the validation works correctly', (task) => {
// assign "decorated" during collection
task.file.meta.decorated = false

test('some test', ({ task }) => {
// assign "decorated" during test run, it will be available
// only in onTestCaseReady hook
task.file.meta.decorated = false
})
})
```

:::tip
If metadata was attached during collection (outside of the `test` function), then it will be available in the [`onTestModuleCollected`](./reporters#ontestmodulecollected) hook in the custom reporter.
:::

## diagnostic

```ts
Expand Down Expand Up @@ -63,5 +90,10 @@ interface ModuleDiagnostic {
* Accumulated duration of all tests and hooks in the module.
*/
readonly duration: number
/**
* The amount of memory used by the module in bytes.
 * This value is only available if the test was executed with the `logHeapUsage` flag.
*/
readonly heap: number | undefined
}
```
27 changes: 27 additions & 0 deletions docs/advanced/api/test-suite.md
Original file line number Diff line number Diff line change
Expand Up @@ -190,3 +190,30 @@ describe('collection failed', () => {
::: warning
Note that errors are serialized into simple objects: `instanceof Error` will always return `false`.
:::

## meta <Version>3.1.0</Version> {#meta}

```ts
function meta(): TaskMeta
```

Custom [metadata](/advanced/metadata) that was attached to the suite during its execution or collection. The meta can be attached by assigning a property to the `task.meta` object during a test run:

```ts {5,10}
import { describe, test } from 'vitest'

describe('the validation works correctly', (task) => {
// assign "decorated" during collection
task.meta.decorated = false

test('some test', ({ task }) => {
// assign "decorated" during test run, it will be available
// only in onTestCaseReady hook
task.suite.meta.decorated = false
})
})
```

:::tip
If metadata was attached during collection (outside of the `test` function), then it will be available in the [`onTestModuleCollected`](./reporters#ontestmodulecollected) hook in the custom reporter.
:::
215 changes: 123 additions & 92 deletions packages/vitest/src/node/reporters/base.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import type { File, Task, TaskResultPack } from '@vitest/runner'
import type { File, Task } from '@vitest/runner'
import type { ErrorWithDiff, UserConsoleLog } from '../../types/general'
import type { Vitest } from '../core'
import type { Reporter } from '../types/reporter'
import type { TestCase, TestModule, TestResult, TestSuite } from './reported-tasks'
import type { TestCase, TestCollection, TestModule, TestModuleState, TestResult, TestSuite, TestSuiteState } from './reported-tasks'
import { performance } from 'node:perf_hooks'
import { getFullName, getSuites, getTestName, getTests, hasFailed } from '@vitest/runner/utils'
import { toArray } from '@vitest/utils'
Expand All @@ -24,7 +24,7 @@ export abstract class BaseReporter implements Reporter {
start = 0
end = 0
watchFilters?: string[]
failedUnwatchedFiles: Task[] = []
failedUnwatchedFiles: TestModule[] = []
isTTY: boolean
ctx: Vitest = undefined!
renderSucceed = false
Expand Down Expand Up @@ -83,6 +83,8 @@ export abstract class BaseReporter implements Reporter {
if (testModule.state() === 'failed') {
this.logFailedTask(testModule.task)
}

this.printTestModule(testModule)
}

private logFailedTask(task: Task) {
Expand All @@ -93,121 +95,150 @@ export abstract class BaseReporter implements Reporter {
}
}

onTaskUpdate(packs: TaskResultPack[]): void {
for (const pack of packs) {
const task = this.ctx.state.idMap.get(pack[0])

if (task) {
this.printTask(task)
}
}
}

/**
* Callback invoked with a single `Task` from `onTaskUpdate`
*/
protected printTask(task: Task): void {
if (
!('filepath' in task)
|| !task.result?.state
|| task.result?.state === 'run'
|| task.result?.state === 'queued') {
protected printTestModule(testModule: TestModule): void {
const moduleState = testModule.state()
if (moduleState === 'queued' || moduleState === 'pending') {
return
}

const suites = getSuites(task)
const allTests = getTests(task)
const failed = allTests.filter(t => t.result?.state === 'fail')
const skipped = allTests.filter(t => t.mode === 'skip' || t.mode === 'todo')
let testsCount = 0
let failedCount = 0
let skippedCount = 0

let state = c.dim(`${allTests.length} test${allTests.length > 1 ? 's' : ''}`)
// delaying logs to calculate the test stats first
// which minimizes the amount of for loops
const logs: string[] = []
const originalLog = this.log.bind(this)
this.log = (msg: string) => logs.push(msg)

if (failed.length) {
state += c.dim(' | ') + c.red(`${failed.length} failed`)
}
const visit = (suiteState: TestSuiteState, children: TestCollection) => {
for (const child of children) {
if (child.type === 'suite') {
const suiteState = child.state()

// Skipped suites are hidden when --hideSkippedTests, print otherwise
if (!this.ctx.config.hideSkippedTests || suiteState !== 'skipped') {
this.printTestSuite(child)
}

if (skipped.length) {
state += c.dim(' | ') + c.yellow(`${skipped.length} skipped`)
visit(suiteState, child.children)
}
else {
const testResult = child.result()

testsCount++
if (testResult.state === 'failed') {
failedCount++
}
else if (testResult.state === 'skipped') {
skippedCount++
}

if (this.ctx.config.hideSkippedTests && suiteState === 'skipped') {
// Skipped suites are hidden when --hideSkippedTests
continue
}

this.printTestCase(moduleState, child)
}
}
}

let suffix = c.dim('(') + state + c.dim(')') + this.getDurationPrefix(task)
visit(moduleState, testModule.children)

if (this.ctx.config.logHeapUsage && task.result.heap != null) {
suffix += c.magenta(` ${Math.floor(task.result.heap / 1024 / 1024)} MB heap used`)
}
this.log = originalLog

let title = getStateSymbol(task)
this.log(this.getModuleLog(testModule, {
tests: testsCount,
failed: failedCount,
skipped: skippedCount,
}))
logs.forEach(log => this.log(log))
}

if (task.meta.typecheck) {
title += ` ${c.bgBlue(c.bold(' TS '))}`
protected printTestCase(moduleState: TestModuleState, test: TestCase): void {
const testResult = test.result()

const { duration, retryCount, repeatCount } = test.diagnostic() || {}
const padding = this.getTestIndentation(test.task)
let suffix = this.getDurationPrefix(test.task)

if (retryCount != null && retryCount > 0) {
suffix += c.yellow(` (retry x${retryCount})`)
}

if (task.projectName) {
title += ` ${formatProjectName(task.projectName, '')}`
if (repeatCount != null && repeatCount > 0) {
suffix += c.yellow(` (repeat x${repeatCount})`)
}

this.log(` ${title} ${task.name} ${suffix}`)
if (testResult.state === 'failed') {
this.log(c.red(` ${padding}${taskFail} ${this.getTestName(test.task, c.dim(' > '))}`) + suffix)

for (const suite of suites) {
if (this.ctx.config.hideSkippedTests && (suite.mode === 'skip' || suite.result?.state === 'skip')) {
// Skipped suites are hidden when --hideSkippedTests
continue
}
// print short errors, full errors will be at the end in summary
testResult.errors.forEach((error) => {
const message = this.formatShortError(error)

const tests = suite.tasks.filter(task => task.type === 'test')
if (message) {
this.log(c.red(` ${padding}${message}`))
}
})
}

if (!('filepath' in suite)) {
this.printSuite(suite)
}
// also print slow tests
else if (duration && duration > this.ctx.config.slowTestThreshold) {
this.log(` ${padding}${c.yellow(c.dim(F_CHECK))} ${this.getTestName(test.task, c.dim(' > '))} ${suffix}`)
}

for (const test of tests) {
const { duration, retryCount, repeatCount } = test.result || {}
const padding = this.getTestIndentation(test)
let suffix = this.getDurationPrefix(test)
else if (this.ctx.config.hideSkippedTests && (testResult.state === 'skipped')) {
// Skipped tests are hidden when --hideSkippedTests
}

if (retryCount != null && retryCount > 0) {
suffix += c.yellow(` (retry x${retryCount})`)
}
// also print skipped tests that have notes
else if (testResult.state === 'skipped' && testResult.note) {
this.log(` ${padding}${getStateSymbol(test.task)} ${this.getTestName(test.task, c.dim(' > '))}${c.dim(c.gray(` [${testResult.note}]`))}`)
}

if (repeatCount != null && repeatCount > 0) {
suffix += c.yellow(` (repeat x${repeatCount})`)
}
else if (this.renderSucceed || moduleState === 'failed') {
this.log(` ${padding}${getStateSymbol(test.task)} ${this.getTestName(test.task, c.dim(' > '))}${suffix}`)
}
}

if (test.result?.state === 'fail') {
this.log(c.red(` ${padding}${taskFail} ${this.getTestName(test, c.dim(' > '))}`) + suffix)
private getModuleLog(testModule: TestModule, counts: {
tests: number
failed: number
skipped: number
}): string {
let state = c.dim(`${counts.tests} test${counts.tests > 1 ? 's' : ''}`)

// print short errors, full errors will be at the end in summary
test.result?.errors?.forEach((error) => {
const message = this.formatShortError(error)
if (counts.failed) {
state += c.dim(' | ') + c.red(`${counts.failed} failed`)
}

if (message) {
this.log(c.red(` ${padding}${message}`))
}
})
}
if (counts.skipped) {
state += c.dim(' | ') + c.yellow(`${counts.skipped} skipped`)
}

// also print slow tests
else if (duration && duration > this.ctx.config.slowTestThreshold) {
this.log(` ${padding}${c.yellow(c.dim(F_CHECK))} ${this.getTestName(test, c.dim(' > '))} ${suffix}`)
}
let suffix = c.dim('(') + state + c.dim(')') + this.getDurationPrefix(testModule.task)

else if (this.ctx.config.hideSkippedTests && (test.mode === 'skip' || test.result?.state === 'skip')) {
// Skipped tests are hidden when --hideSkippedTests
}
const diagnostic = testModule.diagnostic()
if (diagnostic.heap != null) {
suffix += c.magenta(` ${Math.floor(diagnostic.heap / 1024 / 1024)} MB heap used`)
}

// also print skipped tests that have notes
else if (test.result?.state === 'skip' && test.result.note) {
this.log(` ${padding}${getStateSymbol(test)} ${this.getTestName(test)}${c.dim(c.gray(` [${test.result.note}]`))}`)
}
let title = getStateSymbol(testModule.task)

else if (this.renderSucceed || failed.length > 0) {
this.log(` ${padding}${getStateSymbol(test)} ${this.getTestName(test, c.dim(' > '))}${suffix}`)
}
}
if (testModule.meta().typecheck) {
title += ` ${c.bgBlue(c.bold(' TS '))}`
}

if (testModule.project.name) {
title += ` ${formatProjectName(testModule.project.name, '')}`
}

return ` ${title} ${testModule.task.name} ${suffix}`
}

protected printSuite(_task: Task): void {
protected printTestSuite(_suite: TestSuite): void {
// Suite name is included in getTestName by default
}

Expand Down Expand Up @@ -262,8 +293,8 @@ export abstract class BaseReporter implements Reporter {

onWatcherRerun(files: string[], trigger?: string): void {
this.watchFilters = files
this.failedUnwatchedFiles = this.ctx.state.getFiles().filter(file =>
!files.includes(file.filepath) && hasFailed(file),
this.failedUnwatchedFiles = this.ctx.state.getTestModules().filter(testModule =>
!files.includes(testModule.task.filepath) && testModule.state() === 'failed',
)

// Update re-run count for each file
Expand Down Expand Up @@ -296,8 +327,8 @@ export abstract class BaseReporter implements Reporter {

this.log('')

for (const task of this.failedUnwatchedFiles) {
this.printTask(task)
for (const testModule of this.failedUnwatchedFiles) {
this.printTestModule(testModule)
}

this._timeStart = formatTimeString(new Date())
Expand Down Expand Up @@ -405,7 +436,7 @@ export abstract class BaseReporter implements Reporter {
this.log()

const affectedFiles = [
...this.failedUnwatchedFiles,
...this.failedUnwatchedFiles.map(m => m.task),
...files,
]
const tests = getTests(affectedFiles)
Expand Down
Loading
Loading