(Apologies if this has been asked and answered previously - I tried to search for a solution but was not able to find anything).
I'm upgrading an app from webpack 4 → 5.
The app has two entrypoints: "app" and "admin".
The goal is to reproduce the same outputs that webpack 4 did, specifically:
/dist/admin.js // admin chunk
/dist/admin~app.js // shared chunk for app + admin
/dist/app.js // app chunk
/dist/runtime.js // webpack runtime chunk (from runtimeChunk: "single")
/dist/vendor.js // vendor chunk (node_modules)
The webpack 4 config was:
entry: {
app: "app",
admin: "admin"
},
output: {
filename: "[name].js"
},
optimization: {
moduleIds: "hashed",
splitChunks: {
chunks: "all",
cacheGroups: {
vendor: {
name: "vendor",
test: /[\\/]node_modules[\\/]/
}
}
},
runtimeChunk: "single"
}
This produces the expected output.
In webpack 5, we can remove output.filename (since [name].js is the default) and optimization.moduleIds (the new deterministic default replaces hashed), but everything else largely remains the same:
entry: {
app: "app",
admin: "admin"
},
optimization: {
splitChunks: {
chunks: "all",
cacheGroups: {
vendor: {
name: "vendor",
test: /[\\/]node_modules[\\/]/
}
}
},
runtimeChunk: "single"
}
However, this produces the following output:
/dist/121.js // shared chunk for app + admin
/dist/admin.js // admin chunk
/dist/app.js // app chunk
/dist/runtime.js // runtime chunk
/dist/vendor.js // vendor chunk
Note that the shared chunk that was previously named admin~app.js is now named with (what appears to be) a numeric chunk id (121.js).
I am guessing that the difference is perhaps related to this change in the docs:
In webpack 4, the [name] placeholder is described as "The module name"
In webpack 5, the [name] placeholder is described as "The name of the chunk, if set, otherwise the ID of the chunk"
I know it's pedantic and it shouldn't really matter whether the file is called admin~app or 121, but is there a way I can have the shared chunk named as it was in webpack 4?
I use a version of the code in this example from the docs: https://webpack.js.org/plugins/split-chunks-plugin/#splitchunksname
This might work for you. I actually don't split out the node modules and instead have a single cache group that includes vendor and non-vendor chunks, but I use the same special name function. I don't fully understand how or why it works, but it seems to do what I mostly want.
{
//...
optimization: {
// Factor the webpack runtime out into a single dependency rather than
// baking it into each entry point.
// Include it before the other deps for each entry point.
runtimeChunk: 'single',
splitChunks: {
cacheGroups: {
vendor: {
test: /[\\/]node_modules[\\/]/,
name: function (module, chunks, cacheGroupKey) {
const moduleFileName = module
.identifier()
.split('/')
.reduceRight((item) => item);
// This is taken from the documentation but there
seems to be no great explanation but it seems
// to be what we need, joining the entry point names
// together by ~. We can then determine which chunk/file
// needs to be loaded by each entry point.
const allChunksNames = chunks.map((item) => item.name).join('~');
return `${cacheGroupKey}-${allChunksNames}-${moduleFileName}`;
},
chunks: 'all',
priority: -10,
reuseExistingChunk: true
},
commons: {
priority: -20,
reuseExistingChunk: true,
name: function (module, chunks, cacheGroupKey) {
const moduleFileName = module
.identifier()
.split('/')
.reduceRight((item) => item);
// This is taken from the documentation but there
seems to be no great explanation but it seems
// to be what we need, joining the entry point names
// together by ~. We can then determine which chunk/file
// needs to be loaded by each entry point.
const allChunksNames = chunks.map((item) => item.name).join('~');
return `${cacheGroupKey}-${allChunksNames}-${moduleFileName}`;
},
// Automatically split all code as needed into separate files.
chunks: 'all'
},
}
}
}
}
My config looks more like this:
{
//...
optimization: {
// Factor the webpack runtime out into a single dependency rather than
// baking it into each entry point.
// Include it before the other deps for each entry point.
runtimeChunk: 'single',
splitChunks: {
  cacheGroups: {
    commons: {
      name: function (module, chunks, cacheGroupKey) {
        const moduleFileName = module
          .identifier()
          .split('/')
          .reduceRight((item) => item);
        // This is taken from the documentation. There seems to be no great
        // explanation, but it appears to be what we need: joining the entry
        // point names together with ~. We can then determine which chunk/file
        // needs to be loaded by each entry point.
        const allChunksNames = chunks.map((item) => item.name).join('~');
        return `${cacheGroupKey}-${allChunksNames}-${moduleFileName}`;
      },
      // Automatically split all code as needed into separate files.
      chunks: 'all'
    }
  }
}
}
}
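If you specifically want the webpack 4 style admin~app name rather than the cacheGroupKey-prefixed names above, a simpler variant (a sketch I haven't verified against your exact setup) is to keep your original vendor group and derive the shared chunk's name from the chunk names alone:
// webpack.config.js (sketch)
optimization: {
  runtimeChunk: 'single',
  splitChunks: {
    chunks: 'all',
    // webpack 4 derived names like "admin~app" for shared chunks;
    // webpack 5 falls back to the chunk id, so derive the name explicitly.
    name: (module, chunks, cacheGroupKey) =>
      chunks.map((chunk) => chunk.name).join('~'),
    cacheGroups: {
      vendor: {
        name: 'vendor',
        test: /[\\/]node_modules[\\/]/
      }
    }
  }
}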
// webpack image rule
{
test: /\.(png|jpe?g|gif|bmp|svg|webp)(\?.*)?$/,
type: 'asset',
parser: {
dataUrlCondition: function (source, { module }) {
if (/\.less|vue&type=style/.test(module.issuer.resource)) {
return true
}
return source.length < 8092
}
},
...
},
Running the config, I get this:
[DEP_WEBPACK_MODULE_ISSUER] DeprecationWarning: Module.issuer: Use new ModuleGraph API
It means the code needs to change:
module.issuer.resource => compilation.moduleGraph.getIssuer(module).resource.
But parser.dataUrlCondition's type is (source: Buffer, { module: Module, filename: string }) => boolean.
Is there a way to get the current compilation, which contains the moduleGraph property?
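One possible workaround (just a sketch; CaptureCompilationPlugin is a made-up helper, not an official API) is to grab the current compilation from a tiny plugin via compiler.hooks.thisCompilation and read compilation.moduleGraph from there inside dataUrlCondition:
// webpack.config.js (sketch)
let currentCompilation = null;

class CaptureCompilationPlugin {
  apply(compiler) {
    compiler.hooks.thisCompilation.tap('CaptureCompilationPlugin', (compilation) => {
      currentCompilation = compilation;
    });
  }
}

module.exports = {
  // ...
  plugins: [new CaptureCompilationPlugin()],
  module: {
    rules: [
      {
        test: /\.(png|jpe?g|gif|bmp|svg|webp)(\?.*)?$/,
        type: 'asset',
        parser: {
          dataUrlCondition(source, { module }) {
            // Look up the issuer through the ModuleGraph instead of module.issuer.
            const issuer = currentCompilation
              ? currentCompilation.moduleGraph.getIssuer(module)
              : null;
            if (issuer && issuer.resource && /\.less|vue&type=style/.test(issuer.resource)) {
              return true;
            }
            return source.length < 8092;
          }
        }
      }
    ]
  }
};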
When I updated webpack 4 to 5, an error appeared.
I have a webpackDevServer.js which contains the configuration that triggers the error:
// webpackDevServer.js
module.exports = function(proxy, allowedHost) {
return {
before(app, server) {
if (fs.existsSync(paths.proxySetup)) {
// This registers user provided middleware for proxy reasons
require(paths.proxySetup)(app);
}
// This lets us fetch source contents from webpack for the error overlay
app.use(evalSourceMapMiddleware(server));
// This lets us open files from the runtime error overlay.
app.use(errorOverlayMiddleware());
// This service worker file is effectively a 'no-op' that will reset any
// previous service worker registered for the same host:port combination.
// We do this in development to avoid hitting the production cache if
// it used the same host and port.
// https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
app.use(noopServiceWorkerMiddleware());
},
};
};
I use the above file in a start.js file. When I run the project, I type node scripts/start.js:
// start.js
...
const createDevServerConfig = require('../config/webpackDevServer.config');
...
const serverConfig = createDevServerConfig(
proxyConfig,
urls.lanUrlForConfig
);
const devServer = new WebpackDevServer(compiler, serverConfig);
Then it throws an error:
configuration has an unknown property 'before'. These properties are valid:
object { bonjour?, client?, compress?, dev?, firewall?, headers?, historyApiFallback?, host?, hot?, http2?, https?, injectClient?, injectHot?, liveReload?, onAfterSetupMiddleware?, onBeforeSetupMiddleware?, onListening?, open?, openPage?, overlay?, port?, proxy?, public?, setupExitSignals?, static?, stdin?, transportMode?, useLocalIp? }
Here is my package.json:
"webpack": "^5.20.2",
"webpack-dev-server": "^4.0.0-beta.0",
"webpack-manifest-plugin": "2.0.4",
"workbox-webpack-plugin": "^6.1.0"
You have to change before to onBeforeSetupMiddleware. Here is the migration guide from v3 to v4: https://github.com/webpack/webpack-dev-server/blob/master/migration-v4.md
In case the migration guide changes, the details are included below.
v3:
module.exports = {
devServer: {
after: function (app, server, compiler) {
app.get("/some/path", function (req, res) {
res.json({ custom: "response" });
});
},
},
};
v4:
module.exports = {
devServer: {
onAfterSetupMiddleware: function (devServer) {
devServer.app.get("/some/path", function (req, res) {
res.json({ custom: "response" });
});
},
},
};
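For the webpackDevServer.js in the question, the before block would translate roughly like this (a sketch; I'm assuming the react-dev-utils middlewares accept the devServer instance where they previously received server):
// webpackDevServer.js (sketch of the same block in v4 form)
module.exports = function (proxy, allowedHost) {
  return {
    onBeforeSetupMiddleware(devServer) {
      if (fs.existsSync(paths.proxySetup)) {
        // This registers user provided middleware for proxy reasons
        require(paths.proxySetup)(devServer.app);
      }
      // This lets us fetch source contents from webpack for the error overlay
      devServer.app.use(evalSourceMapMiddleware(devServer));
      // This lets us open files from the runtime error overlay.
      devServer.app.use(errorOverlayMiddleware());
      // Reset any service worker previously registered for the same host:port.
      devServer.app.use(noopServiceWorkerMiddleware());
    },
  };
};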
I should have searched sooner: when I searched for some keywords (e.g. onBeforeSetupMiddleware), I found the webpack-dev-server GitHub releases page, which describes the changes in the new 4.0.0 beta: https://github.com/webpack/webpack-dev-server/releases
Currently Jest is failing the tests because it cannot find a module imported inside a component:
FAIL tests/Unit/VHeaderBar.spec.ts
● Test suite failed to run
Cannot find module '@@/public/assets/images/placeholder.png' from 'VHeaderBar.vue'
at Resolver.resolveModule (node_modules/jest-runtime/node_modules/jest-resolve/build/index.js:221:17)
at src/components/VHeaderBar.vue:687:18
at Object.<anonymous> (src/components/VHeaderBar.vue:749:3)
Case
In Nuxt.js the @@ sign refers to the root directory; in the end solution we want to store images in the public or storage folder, which is located in the root of the project.
When running tests, Jest checks the src folder, then tries to resolve the images referenced from the root and can't find them.
I have tried many different ways to fix this issue, but can't seem to find the solution.
Here's a shortlist of what I already tried:
Changing the regex to check for image files and point it to the correct folder using the moduleNameMapper option in the Jest config file (see the sketch after this list).
I read something on Stack about using a "mock" folder that exports the images files through javascript, but that didn't work.
Using the modulePaths option in the Jest config file.
Creating an alias in tsconfig.json for the assets folder and using that in the moduleNameMapper
Tried a different approach in the VueJS component and test file to load assets, which broke the compiling process (so I reverted that).
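For reference, here is a minimal sketch of what the moduleNameMapper attempt could look like, assuming the Nuxt default aliases (@@ and ~~ point at the project root, @ and ~ at src) and a hypothetical stub file for images:
// jest.config.js (sketch)
module.exports = {
  moduleNameMapper: {
    // Root-dir aliases first so "@@/..." is not captured by the "@/" rule.
    "^@@/(.*)$": "<rootDir>/$1",
    "^~~/(.*)$": "<rootDir>/$1",
    "^@/(.*)$": "<rootDir>/src/$1",
    "^~/(.*)$": "<rootDir>/src/$1",
    // Stub image imports instead of resolving the real files.
    "\\.(gif|jpe?g|tiff|png|svg|webp)$": "<rootDir>/tests/__mocks__/fileMock.js"
  }
};

// tests/__mocks__/fileMock.js (hypothetical path)
// module.exports = "test-file-stub";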
Current Jest Config file
module.exports = {
moduleFileExtensions: [
"ts",
"tsx",
"vue",
"js",
"json"
],
watchman: false,
moduleNameMapper: {
"/\.(gif|jpg|jpeg|tiff|png)$/i": "<rootDir>/public/assets/images/$1",
"^#/(.*)$": "<rootDir>/src/$1",
"^~/(.*)$": "<rootDir>/src/$1",
"^~~/(.*)$": "<rootDir>/src/$1"
},
transform: {
// process js with `babel-jest`
"^.+\\.js$": "<rootDir>/node_modules/babel-jest",
// process `*.vue` files with `vue-jest`
".*\\.(vue)$": "<rootDir>/node_modules/vue-jest",
// process `*.ts` files with `ts-jest`
"^.+\\.(ts|tsx)$": "<rootDir>/node_modules/ts-jest",
},
snapshotSerializers: [
"<rootDir>/node_modules/jest-serializer-vue"
],
collectCoverage: true,
collectCoverageFrom: [
"<rootDir>/src/components/**/*.vue",
"<rootDir>/src/pages/**/*.vue",
"<rootDir>/src/layouts/**/*.vue"
],
testMatch: [
'**/tests/Unit/**/*.spec.(js|jsx|ts|tsx)|**/__tests__/*.(js|jsx|ts|tsx)'
],
}
Current folder structure (only folders we use for the test)
project folder
- public
-- assets
--- **images**
- src
-- components
--- **mounted component** (works)
- tests
-- Unit
--- mountedComponent.spec.ts
Any suggestions?
Do I fix the jest.config?
Is there something wrong with the syntax?
Do I have to fix the tsconfig?
I've had a similar issue and it comes down to TypeScript not being able to import that file.
I've solved it by adding file type definition to files.d.ts:
declare module "*.pdf" {
const file: Buffer;
export default file;
}
declare module "*.jpeg" {
const src: string;
export default src;
}
declare module "*.png" {
const src: string;
export default src;
}
Referring to this file in tsconfig.json:
{
"compilerOptions": {
/* ... */
},
"files": ["./src/#types/files.d.ts"],
"include": ["src/**/*"],
"exclude": ["node_modules"]
}
And adding file transforms to jest.config.js:
module.exports = {
preset: "ts-jest",
testEnvironment: "node",
roots: ["<rootDir>/src/"],
moduleNameMapper: {
"#app/(.*)": "<rootDir>/src/$1",
"#lib/(.*)": "<rootDir>/src/lib/$1",
},
transform: { // Transforms here
"\\.(gql|graphql)$": "#jagi/jest-transform-graphql",
"\\.(html|html|txt|pem|key)$": "./jest-transform-text.js",
"\\.(p12|pdf|otf|ttf)$": "./jest-transform-buffer.js",
"^(?!.*\\.(js|jsx|ts|tsx|css|json)$)": "<rootDir>/config/jest/fileTransform.js"
},
coverageReporters: ["text", "lcov"],
};
Examples of transform files:
// jest-transform-buffer.js
"use strict";
const fs = require("fs");
module.exports = {
process(src, filename) {
const data = fs.readFileSync(filename, "hex");
return (
'module.exports=Buffer.from("' +
data +
'","hex");module.exports.default=module.exports;'
);
},
};
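The jest-transform-text.js referenced in the config above isn't shown in the answer; a minimal sketch of what such a transformer could look like (it simply exports the raw file contents as a string):
// jest-transform-text.js (sketch)
"use strict";
module.exports = {
  process(src, filename) {
    // `src` is already the file's text; export it as a CommonJS string module.
    return (
      "module.exports=" + JSON.stringify(src) +
      ";module.exports.default=module.exports;"
    );
  },
};
Like the buffer transformer above, it returns the generated source directly, matching the older transformer API used in the rest of the answer.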
And for images (or other files where you only need a filename), this transform from create-react-app:
'use strict';
const path = require('path');
const camelcase = require('camelcase');
// This is a custom Jest transformer turning file imports into filenames.
// http://facebook.github.io/jest/docs/en/webpack.html
module.exports = {
process(src, filename) {
const assetFilename = JSON.stringify(path.basename(filename));
if (filename.match(/\.svg$/)) {
// Based on how SVGR generates a component name:
// https://github.com/smooth-code/svgr/blob/01b194cf967347d43d4cbe6b434404731b87cf27/packages/core/src/state.js#L6
const pascalCaseFilename = camelcase(path.parse(filename).name, {
pascalCase: true,
});
const componentName = `Svg${pascalCaseFilename}`;
return `const React = require('react');
module.exports = {
__esModule: true,
default: ${assetFilename},
ReactComponent: React.forwardRef(function ${componentName}(props, ref) {
return {
$$typeof: Symbol.for('react.element'),
type: 'svg',
ref: ref,
key: null,
props: Object.assign({}, props, {
children: ${assetFilename}
})
};
}),
};`;
}
return `module.exports = ${assetFilename};`;
},
};
I am making a Webpack 4 plugin for fun and to try to understand its internals. The idea is simple:
Parse an HTML template file into a tree;
Get the asset paths from <img src="..."> and <link href="...">;
Add the assets to dependencies to load them through the file-loader;
Get the path emitted from file-loader (which might include a hash) and fix the nodes in the tree;
Emit the final HTML string into a file.
So far, I am stuck at step 4. Parsing the template and extracting the asset paths was easy thanks to parse5, and to load the assets I used the PrefetchPlugin, but now I don't know how to get the result from file-loader.
I need to load the result because it generates a hash and might change the location of the asset:
{
exclude: /\.(css|jsx?|mjs)$/,
use: [{
loader: 'file-loader',
options: {
name: '[name].[ext]?[sha512:hash:base64:8]',
},
}],
}
Not only that, but I want to use the url-loader later, which might emit the asset as an encoded data URL. I am trying to get the result from the loader at tapAfterCompile.
The current source code for the plugin is as follows:
import debug from 'debug'
import prettyFormat from 'pretty-format'
import validateOptions from 'schema-utils'
import {dirname, resolve} from 'path'
import {html as beautifyHtml} from 'js-beautify'
import {minify as minifyHtml} from 'html-minifier'
import {parse, serialize} from 'parse5'
import {PrefetchPlugin} from 'webpack'
import {readFileSync} from 'fs'
let log = debug('bb:config:webpack:plugin:html')
const PLUGIN_NAME = 'HTML Plugin'
/**
* This schema is used to validate the plugin’s options, right now, all it does
* is requiring the template property.
*/
const OPTIONS_SCHEMA = {
additionalProperties: false,
type: 'object',
properties: {
minify: {
type: 'boolean',
},
template: {
type: 'string',
},
},
required: ['template'],
}
/**
* Extract an attribute’s value from the node; Returns undefined if the
* attribute is not found.
*/
function getAttributeValue(node, attributeName) {
for (let attribute of node.attrs) {
if (attribute.name === attributeName)
return attribute.value
}
return undefined
}
/**
* Update a node’s attribute value.
*/
function setAttributeValue(node, attributeName, value) {
for (let attribute of node.attrs) {
if (attribute.name === attributeName)
attribute.value = value
}
}
/**
* Recursively walks the parsed tree. It should work in 99.9% of the cases but
* it needs to be replaced with a non recursive version.
*/
function * walk(node) {
yield node
if (!node.childNodes)
return
for (let child of node.childNodes)
yield * walk(child)
}
/**
* Actual Webpack plugin that generates an HTML file from a template, adds the
* script bundles, and loads any local assets referenced in the code.
*/
export default class SpaHtml {
/**
* Options passed to the plugin.
*/
options = null
/**
* Parsed tree of the template.
*/
tree = null
constructor(options) {
this.options = options
validateOptions(OPTIONS_SCHEMA, this.options, PLUGIN_NAME)
}
/**
* Webpack will call this method to allow the plugin to hook to the
* compiler’s events.
*/
apply(compiler) {
let {hooks} = compiler
hooks.afterCompile.tapAsync(PLUGIN_NAME, this.tapAfterCompile.bind(this))
hooks.beforeRun.tapAsync(PLUGIN_NAME, this.tapBeforeRun.bind(this))
}
/**
* Return the extracted the asset paths from the tree.
*/
* extractAssetPaths() {
log('Extracting asset paths...')
const URL = /^(https?:)?\/\//
const TEMPLATE_DIR = dirname(this.options.template)
for (let node of walk(this.tree)) {
let {tagName} = node
if (!tagName)
continue
let assetPath
switch (tagName) {
case 'link':
assetPath = getAttributeValue(node, 'href')
break
case 'img':
assetPath = getAttributeValue(node, 'src')
break
}
// Ignore empty paths and URLs.
if (!assetPath || URL.test(assetPath))
continue
const RESULT = {
context: TEMPLATE_DIR,
path: assetPath,
}
log(`Asset found: ${prettyFormat(RESULT)}`)
yield RESULT
}
log('Done extracting assets.')
}
/**
* Returns the current tree as a beautified or minified HTML string.
*/
getHtmlString() {
let serialized = serialize(this.tree)
// We pass the serialized HTML through the minifier to remove any
// unnecessary whitespace that could affect the beautifier. When we are
// actually trying to minify, comments will be removed too. Options can be
// found in:
//
// https://github.com/kangax/html-minifier
//
const MINIFIER_OPTIONS = {
caseSensitive: false,
collapseBooleanAttributes: true,
collapseInlineTagWhitespace: true,
collapseWhitespace: true,
conservativeCollapse: false,
decodeEntities: true,
html5: true,
includeAutoGeneratedTags: false,
keepClosingSlash: false,
preserveLineBreaks: false,
preventAttributesEscaping: true,
processConditionalComments: false,
quoteCharacter: '"',
removeAttributeQuotes: true,
removeEmptyAttributes: true,
removeEmptyElements: false,
removeOptionalTags: true,
removeRedundantAttributes: true,
removeScriptTypeAttributes: true,
removeStyleLinkTypeAttributes: true,
sortAttributes: true,
sortClassName: true,
useShortDoctype: true,
}
let {minify} = this.options
if (minify) {
// Minify.
serialized = minifyHtml(serialized, {
minifyCSS: true,
minifyJS: true,
removeComments: true,
...MINIFIER_OPTIONS,
})
} else {
// Beautify.
serialized = minifyHtml(serialized, MINIFIER_OPTIONS)
serialized = beautifyHtml(serialized, {
indent_char: ' ',
indent_inner_html: true,
indent_size: 2,
sep: '\n',
unformatted: ['code', 'pre'],
})
}
return serialized
}
/**
* Load the template and parse it using Parse5.
*/
parseTemplate() {
log('Loading template...')
const SOURCE = readFileSync(this.options.template, 'utf8')
log('Parsing template...')
this.tree = parse(SOURCE)
log('Done loading and parsing template.')
}
async tapAfterCompile(compilation, done) {
console.log()
console.log()
for (let asset of compilation.modules) {
if (asset.rawRequest == 'assets/logo.svg')
console.log(asset)
}
console.log()
console.log()
// Add the template to the dependencies to trigger a rebuild on change in
// watch mode.
compilation.fileDependencies.add(this.options.template)
// Emit the final HTML.
const FINAL_HTML = this.getHtmlString()
compilation.assets['index.html'] = {
source: () => FINAL_HTML,
size: () => FINAL_HTML.length,
}
done()
}
async tapBeforeRun(compiler, done) {
this.parseTemplate()
// Add assets to the compilation.
for (let {context, path} of this.extractAssetPaths()) {
new PrefetchPlugin(context, path)
.apply(compiler)
}
done()
}
}
Found the answer: after the dependencies are loaded, I can access the generated module's source:
// Index the modules generated in the child compiler by raw request.
let byRawRequest = new Map
for (let asset of compilation.modules)
byRawRequest.set(asset.rawRequest, asset)
// Replace the template requests with the result from modules generated in
// the child compiler.
for (let {node, request} of this._getAssetRequests()) {
if (!byRawRequest.has(request))
continue
const ASSET = byRawRequest.get(request)
const SOURCE = ASSET.originalSource().source()
const NEW_REQUEST = execAssetModule(SOURCE)
setResourceRequest(node, NEW_REQUEST)
log(`Changed: ${prettyFormat({from: request, to: NEW_REQUEST})}`)
}
And execute the module's source with a VM:
// Script comes from Node's built-in vm module.
import {Script} from 'vm'

function execAssetModule(code) {
let script = new Script(code)
let exports = {}
let sandbox = {
__webpack_public_path__: '',
module: {exports},
exports,
}
script.runInNewContext(sandbox)
return sandbox.module.exports
}
I have an application with the server code running on Node.js and using Dojo. I have a config module defined like:
define([
'dojo/node!nconf',
'dojo/_base/config'
], function (nconf, dojoConfig) {
nconf.argv().file({
file: dojoConfig.baseDir + '/config.json'
});
console.log('-- file name:', dojoConfig.baseDir + '/config.json');
console.log('-- context:', nconf.get('context'));
// ... logic here ...
return nconf.get(nconf.get('context'));
});
To be able to unit test this module, I've written two mocks: one for the nconf native module and one for dojoConfig. Here is the test:
define([
'require',
'intern!object',
'intern/chai!assert'
], function (require, registerSuite, assert) {
registerSuite({
name: 'config utility',
'load default settings': function () {
require.undef('dojo/node!nconf');
require.undef('dojo/_base/config');
require({ map: {
'*': {
'dojo/node!nconf': 'server/utils/tests/nconfMock',
'dojo/_base/config': 'server/utils/tests/dojoConfigMock'
}
}});
require(['../config', './nconfMock'], this.async(1000).callback(
function (config, nconfMock) {
assert.isNotNull(config);
assert.isNotNull(nconf);
// assert.deepEqual(config, nconfMock.contextSettings.test);
}
));
}
});
});
I can see that my mock of dojoConfig is correctly loaded, but not the mock of the nconf module. During a webcast on Intern, Dylan mentioned that the mapping does not consider the plugin, and that there is a way to force the dojo/node module to load this nconfMock. Would you mind giving me more details?
Obviously, this is verbose, so if this continues to be a common request, we’ll probably do something to make it simpler in the future.
Important note: Without mapping dojo/node to intern/node_modules/dojo/node, the loading of my initial config module as defined above fails in the Intern environment. The mapping is done in the intern.js file. The reported error is:
Error: node plugin failed to load because environment is not Node.js
at d:/git/fco2/src/libs/dojo/node.js:3:9
at execModule (d:\git\fco2\node_modules\intern\node_modules\dojo\dojo.js:512:54)
at d:\git\fco2\node_modules\intern\node_modules\dojo\dojo.js:579:7
at guardCheckComplete (d:\git\fco2\node_modules\intern\node_modules\dojo\dojo.js:563:4)
at checkComplete (d:\git\fco2\node_modules\intern\node_modules\dojo\dojo.js:571:27)
at onLoadCallback (d:\git\fco2\node_modules\intern\node_modules\dojo\dojo.js:653:7)
at d:\git\fco2\node_modules\intern\node_modules\dojo\dojo.js:758:5
at fs.js:266:14
at Object.oncomplete (fs.js:107:15)
Solution: As suggested by Colin Snover below, I now use Mockery. I also do NOT use the contextual require, only the default one. Here is a (simplified) solution working with version 1.9.3 of the Dojo toolkit.
define([
'intern!object',
'intern/chai!assert',
'intern/node_modules/dojo/node!mockery',
'./nconfMock'
], function (registerSuite, assert, mockery, nconfMock) {
registerSuite({
name: 'config utility',
teardown: function () {
mockery.disable();
mockery.deregisterAll();
require({ map: { '*': { 'dojo/_base/config': 'dojo/_base/config' } } });
require.undef('dojo/_base/config');
require.undef('server/utils/config');
},
'load default settings': function () {
mockery.enable();
mockery.registerMock('nconf', nconfMock);
require({ map: { '*': { 'dojo/_base/config': 'server/utils/tests/dojoConfigMock' } } });
require.undef('dojo/_base/config');
require.undef('server/utils/config');
require(
['server/utils/config'],
this.async(1000).callback(function (config) {
assert.isNotNull(config);
assert.deepEqual(config, nconfMock.contextSettings.test);
})
);
}
});
});
Thanks, Dom
In order to mock a Node.js dependency, you will probably want to simply use one of the various available projects for mocking Node.js modules. Mockery is a good choice since it’s stand-alone.
Since it looks like you’re using dojo/node and not the one from Intern, in your case, you’d do it like this:
define([
'intern!object', 'dojo/node!mockery', 'dojo/Deferred', 'require'
], function (registerSuite, mockery, Deferred, require) {
var moduleUsingMock;
registerSuite({
setup: function () {
var dfd = new Deferred();
mockery.enable();
mockery.registerMock('module-to-mock', mockObject);
require([ 'module-using-mock' ], function (value) {
moduleUsingMock = value;
dfd.resolve();
});
return dfd.promise;
},
teardown: function () {
mockery.disable();
},
'some test': function () {
moduleUsingMock.whatever();
// ...
}
});
});