react/scripts/jest/test-framework-setup.js
Sebastian Markbåge 9011182c55 Configure Jest with Stack and Fiber as separate projects (#10214)
* Disable Fiber specific test run in CI

This disables the comparison against previously recorded tests. Instead,
we'll rely on Jest failures to fail tests.

* Extract jest config into two separate projects for Fiber and Stack

This allows us to run both in the same Jest run. The setupMocks file is forked
into environment-specific configuration for each project, replacing the
environment variable (a sketch of such a split is shown below the commit
message).

I used copy-paste here to keep the two configurations explicit; we can abstract
this later. It's already clear to me that simply extracting the shared pieces is
not the best way to abstract this: setupMocks, for example, didn't need all of
the code in both branches.

I think some of the shared code, such as the error-message extraction, should
probably be lifted out into a stand-alone Jest project rather than being shared
between the two configs.

* Fix class equivalence test

There's a behavior change when projects are used: setupTestFrameworkScriptFile
no longer overrides the normal config.

This test should probably just move to a separate CI script or something
less hacky.

* Only run Fiber tests with scripts/fiber/record-tests
2017-07-19 10:35:45 -07:00
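
For context, Jest's projects option lets a single run execute multiple
configurations side by side, which is what "run both in the same Jest run"
refers to above. Below is a minimal sketch of how the Stack and Fiber split
might look; the setupMocks.*.js names are hypothetical stand-ins for the forked
per-renderer setup files, and the actual paths and options in the PR may differ.

// Sketch only: hypothetical file names, not the actual config from the PR.
module.exports = {
  projects: [
    {
      // Stack reconciler run
      setupFiles: ['<rootDir>/scripts/jest/setupMocks.stack.js'],
      setupTestFrameworkScriptFile:
        '<rootDir>/scripts/jest/test-framework-setup.js',
    },
    {
      // Fiber reconciler run
      setupFiles: ['<rootDir>/scripts/jest/setupMocks.fiber.js'],
      setupTestFrameworkScriptFile:
        '<rootDir>/scripts/jest/test-framework-setup.js',
    },
  ],
};

As the commit notes, once projects are in play, setupTestFrameworkScriptFile no
longer overrides the normal config, which is why the class equivalence test
needed a separate fix.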

85 lines · 2.4 KiB · JavaScript

'use strict';

if (process.env.REACT_CLASS_EQUIVALENCE_TEST) {
  // Inside the class equivalence tester, we have a custom environment, let's
  // require that instead.
  require('./setupSpecEquivalenceReporter.js');
} else {
  var env = jasmine.getEnv();

  var callCount = 0;
  var oldError = console.error;
  var newError = function() {
    callCount++;
    oldError.apply(this, arguments);
  };

  console.error = newError;

  // TODO: Stop using spyOn in all the tests since that seems deprecated.
  // Legacy upgrade path from https://github.com/facebook/jest/blob/21a2b7aaee366af7ed87ae78c5b2d58cf3f5fb86/packages/jest-matchers/src/spy_matchers.js#L160
  const isSpy = spy => spy.calls && typeof spy.calls.count === 'function';

  env.beforeEach(() => {
    callCount = 0;
    jasmine.addMatchers({
      toBeReset() {
        return {
          compare(actual) {
            // TODO: Catch test cases that call spyOn() but don't inspect the
            // mock properly.
            if (actual !== newError && !isSpy(actual)) {
              return {
                pass: false,
                message: () =>
                  'Test did not tear down console.error mock properly.',
              };
            }
            return {pass: true};
          },
        };
      },
      toNotHaveBeenCalled() {
        return {
          compare(actual) {
            return {
              pass: callCount === 0,
              message: () =>
                'Expected test not to warn. If the warning is expected, mock ' +
                "it out using spyOn(console, 'error'); and test that the " +
                'warning occurs.',
            };
          },
        };
      },
    });
  });

  env.afterEach(() => {
    expect(console.error).toBeReset();
    expect(console.error).toNotHaveBeenCalled();
  });

  function wrapDevMatcher(obj, name) {
    const original = obj[name];
    obj[name] = function devMatcher() {
      try {
        original.apply(this, arguments);
      } catch (e) {
        global.__hadDevFailures = e.stack;
      }
    };
  }

  const expectDev = function expectDev(actual) {
    const expectation = expect(actual);
    if (global.__suppressDevFailures) {
      Object.keys(expectation).forEach(name => {
        wrapDevMatcher(expectation, name);
        wrapDevMatcher(expectation.not, name);
      });
    }
    return expectation;
  };
  global.expectDev = expectDev;
}
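
For illustration, here is a rough sketch of how a test interacts with the hooks
above: a test that expects a warning replaces the patched console.error with a
spy (so the afterEach checks pass) and uses expectDev for assertions that are
suppressed when global.__suppressDevFailures is set. The doSomethingThatWarns
helper is hypothetical, not part of the React test suite.

// Hypothetical test sketch, not an actual test from the repository.
it('warns once for the bad input', () => {
  // Replacing console.error with a Jasmine spy satisfies both toBeReset()
  // (isSpy passes) and toNotHaveBeenCalled() (the newError counter stays 0).
  spyOn(console, 'error');

  doSomethingThatWarns(); // hypothetical code under test

  // Inspect the spy explicitly; expectDev records rather than throws failures
  // when global.__suppressDevFailures is set.
  expectDev(console.error.calls.count()).toBe(1);
  expectDev(console.error.calls.argsFor(0)[0]).toContain('Warning:');
});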