Running npm install with sbt - node.js

So I have an sbt project that uses the sbt-js-engine and sbt-webpack plugins.
It successfully fetches and resolves npm packages just fine, and webpack then builds the project.
I have added an npm install script to package.json like so:
"scripts": {
  "install": "bower install"
}
However, the problem I am currently having is that when I run webpack (which in turn uses sbt-js-engine) it runs npm update instead of npm install.
Here's an excerpt of my build.sbt:
lazy val common = project.in(file("common")).
  enablePlugins(SbtWeb).
  settings(
    sourceDirectory in webpack := baseDirectory.value,
    resourceManaged in webpack := (resourceManaged in webpack in root).value,
    includeFilter in webpack := ("*.jsx" || "*.js" || "*.json") && new FileFilter {
      @tailrec
      override def accept(pathname: File): Boolean = {
        if (pathname == null) false
        else if (pathname.getName == "javascripts") true
        else accept(pathname.getParentFile)
      }
    },
    JsEngineKeys.engineType := JsEngineKeys.EngineType.Node
  )
Is there any way I could run npm install instead, or even run it beforehand as a dependency of the webpack task?

You could try something like this:
sourceDirectory in webpack := {
  Process("/usr/local/bin/npm install", file("[path to working dir]")).!
  baseDirectory.value
}
That would mean it runs at the same time as the webpack settings are set up.

Related

Run another yarn/npm task within a package.json, without specifying yarn or npm

I have a task in my package.json "deploy", which needs to first call "build". I have specified it like this:
"deploy": "yarn run build; ./deploy.sh",
The problem is that this hard codes yarn as the package manager. So if someone doesn't use yarn, it doesn't work. Switching to npm causes a similar issue.
What's a good way to achieve this while remaining agnostic to the choice of npm or yarn?
One simple approach is to use the npm-run-all package, whose documentation states:
Yarn Compatibility
If a script is invoked with Yarn, npm-run-all will correctly use Yarn to execute the plan's child scripts.
So you can do this:
"predeploy": "run-s build",
"deploy": "./deploy.sh",
And the predeploy step will use either npm or yarn depending on how you invoked the deploy task.
I think it is good to have the scripts in package.json remain package-manager agnostic so that they aren't tied to a specific package manager, but within a project it is probably prudent to agree on a single package manager so that you're not dealing with conflicting lockfiles.
It's probably not ideal, but you could run a .js file at your project root to make these checks...
You could create a file at your project root called yarnpm.js (or whatever), and call that file in your package.json deploy command:
// package.json (trimmed)
"scripts": {
  "deploy": "node yarnpm",
  "build": "whatever build command you use"
},
// yarnpm.js
const fs = require('fs');
const FILE_NAME = process.argv[1].replace(/^.*[\\\/]/, '');

// Command you wish to run, with `{{}}` in place of `npm` or `yarn`.
// This would allow you to easily run multiple `npm`/`yarn` commands without much work,
// for example: `{{}} run one && {{}} run two`
const COMMAND_TO_RUN = '{{}} run build; ./deploy.sh';

try {
  if (fs.existsSync('./package-lock.json')) { // Check for `npm`
    execute(COMMAND_TO_RUN.replace(/\{\{\}\}/g, 'npm'));
  } else if (fs.existsSync('./yarn.lock')) { // Check for `yarn`
    execute(COMMAND_TO_RUN.replace(/\{\{\}\}/g, 'yarn'));
  } else {
    console.log('\x1b[33m', `[${FILE_NAME}] Unable to locate either npm or yarn!`, '\x1b[0m');
  }
} catch (err) {
  console.log('\x1b[31m', `[${FILE_NAME}] Unable to deploy!`, '\x1b[0m');
}

function execute(command) { // Helper function to make running `exec` easier
  require('child_process').exec(command, (error, stdout, stderr) => {
    if (error) {
      console.log(`error: ${error.message}`);
      return;
    }
    if (stderr) {
      console.log(`stderr: ${stderr}`);
      return;
    }
    console.log(stdout);
  });
}
Hope this helps in some way! Cheers.
EDIT:
...or if you wanted to parameterize the yarnpm.js script, to make it easily reusable and to keep all "commands" inside the package.json file, you could do something like this:
// package.json (trimmed, parameterized)
"scripts": {
  "deploy": "node yarnpm '{{}} run build; ./deploy.sh'",
  "build": "node build.js"
},

// yarnpm.js (parameterized)
const COMMAND_TO_RUN = process.argv[2]; // Technically, the first 'parameter' is the third index
const FILE_NAME = process.argv[1].replace(/^.*[\\\/]/, '');

if (COMMAND_TO_RUN) {
  const fs = require('fs');
  try {
    if (fs.existsSync('./package-lock.json')) { // Check for `npm`
      execute(COMMAND_TO_RUN.replace(/\{\{\}\}/g, 'npm'));
    } else if (fs.existsSync('./yarn.lock')) { // Check for `yarn`
      execute(COMMAND_TO_RUN.replace(/\{\{\}\}/g, 'yarn'));
    } else {
      console.log('\x1b[33m', `[${FILE_NAME}] Unable to locate either npm or yarn!`, '\x1b[0m');
    }
  } catch (err) {
    console.log('\x1b[31m', `[${FILE_NAME}] Unable to deploy!`, '\x1b[0m');
  }

  function execute(command) { // Helper function to make running `exec` easier
    require('child_process').exec(command, (error, stdout, stderr) => {
      if (error) {
        console.log(`error: ${error.message}`);
        return;
      }
      if (stderr) {
        console.log(`stderr: ${stderr}`);
        return;
      }
      console.log(stdout);
    });
  }
} else {
  console.log('\x1b[31m', `[${FILE_NAME}] Requires a single argument!`, '\x1b[0m');
}
What if you check before running?
You can create a new file called build.sh with the content below:
#!/bin/bash
# Check whether the current user has a node environment installed; if not, install it automatically.
if command -v node >/dev/null 2>&1; then
  echo "version of node: $(node -v)"
  echo "version of npm: $(npm -v)"
else
  # Auto-install the node environment. This assumes the platform is CentOS;
  # change this part to apply to other platforms.
  curl --silent --location https://rpm.nodesource.com/setup_12.x | sudo bash -
  sudo yum -y install nodejs
fi
npm run build
Then your script will be:
{
  "deploy": "./build.sh && ./deploy.sh"
}
So I think I have a much simpler solution:
"deploy": "yarn run build || npm run build; ./deploy.sh",
Its only real downside is that if yarn exists but the build fails, npm run build will also run.

npm package.json aliases like webpack

I am trying to alias a module, however I am not sure how to do that with package.json.
In webpack you would do something like this:
module.exports = {
  //...
  resolve: {
    alias: {
      'pixi.js': 'pixi.js-legacy'
    }
  }
};
But what is the equivalent without webpack?
Since NPM Version 6.9 of March 2019 it is supported without installing any additional packages (see the RFC):
npm i aliasName@npm:packageToInstall
⬇⬇⬇
// package.json
"dependencies": {
  "aliasName": "npm:packageToInstall@^1.6.1"
}
The idea seems to be that npm: is a URI-like scheme in a dependency version specifier.
Usage:
const alias = require( 'aliasName' );
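Applied to the pixi.js case from the question, that would presumably look like the following (my sketch of the same npm: alias; the version range is just a placeholder):
// Install pixi.js-legacy under the alias name pixi.js (illustrative command):
//   npm i pixi.js@npm:pixi.js-legacy
// package.json then ends up with something like:
//   "pixi.js": "npm:pixi.js-legacy@<version range>"
const PIXI = require('pixi.js'); // actually loads pixi.js-legacy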
There is an npm package for this: module-alias.
After installing it you can add your aliases to the package.json, like so:
"_moduleAliases": {
  "@root"      : ".", // Application's root
  "@deep"      : "src/some/very/deep/directory/or/file",
  "@my_module" : "lib/some-file.js",
  "something"  : "src/foo" // Or without @. Actually, it could be any string
}
Make sure to add this line at the top of your app's main file:
require('module-alias/register');
You should only use this in final products (and not in packages you intend to publish to npm or use elsewhere), since it modifies the behavior of require.
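For example, a minimal entry point using the aliases above might look like this (just a sketch; @my_module is the alias defined in the snippet):
// app.js (sketch)
require('module-alias/register'); // must run before any aliased require
const myModule = require('@my_module'); // resolves to lib/some-file.js via _moduleAliases above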

Yarn install production dependencies of a single package in workspace

I'm trying to install the production dependencies only for a single package in my workspace. Is that possible?
I've already tried this:
yarn workspace my-package-in-workspace install -- --prod
But it is installing all production dependencies of all my packages.
yarn 1 doesn't support it as far as I know.
If you are trying to install a specific package in a Dockerfile, there is a workaround:
1. Copy the yarn.lock file and the root package.json.
2. Copy only the package.json files of the packages you need: your package and the other packages in the monorepo that your package depends on locally.
3. In the Dockerfile, manually remove all the devDependencies from all the package.json files that you copied.
4. Run yarn install on the root package.json.
Note:
Deterministic installation - it is recommended to do this in monorepos to force a deterministic install: https://stackoverflow.com/a/64503207/806963
Full Dockerfile example:
FROM node:12
WORKDIR /usr/project
COPY yarn.lock package.json remove-all-dev-deps-from-all-package-jsons.js change-version.js ./
ARG package_path=packages/dancer-placing-manager
COPY ${package_path}/package.json ./${package_path}/package.json
RUN node remove-all-dev-deps-from-all-package-jsons.js && rm remove-all-dev-deps-from-all-package-jsons.js
RUN yarn install --frozen-lockfile --production
COPY ${package_path}/dist/src ./${package_path}/dist/src
COPY ${package_path}/src ./${package_path}/src
CMD node --unhandled-rejections=strict ./packages/dancer-placing-manager/dist/src/index.js
remove-all-dev-deps-from-all-package-jsons.js:
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')

// Strip devDependencies from a single package.json
async function deleteDevDeps(packageJsonPath) {
  const packageJson = require(packageJsonPath)
  delete packageJson.devDependencies
  await new Promise((res, rej) =>
    fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2), 'utf-8', error => (error ? rej(error) : res())),
  )
}

// Locate every workspace package.json via `yarn workspaces info`
function getSubPackagesPaths(repoPath) {
  const result = execSync(`yarn workspaces --json info`).toString()
  const workspacesInfo = JSON.parse(JSON.parse(result).data)
  return Object.values(workspacesInfo)
    .map(workspaceInfo => workspaceInfo.location)
    .map(packagePath => path.join(repoPath, packagePath, 'package.json'))
}

async function main() {
  const repoPath = __dirname
  const packageJsonPath = path.join(repoPath, 'package.json')
  await deleteDevDeps(packageJsonPath)
  await Promise.all(getSubPackagesPaths(repoPath).map(packageJsonPath => deleteDevDeps(packageJsonPath)))
}

if (require.main === module) {
  main()
}
It looks like this is easily possible now with Yarn 2: https://yarnpkg.com/cli/workspaces/focus
But I haven't tried it myself.
Here is my solution for Yarn 1:
# Install dependencies for the whole monorepo because
# 1. The --ignore-workspaces flag is not implemented https://github.com/yarnpkg/yarn/issues/4099
# 2. The --focus flag is broken https://github.com/yarnpkg/yarn/issues/6715
# Avoid the target workspace dependencies to land in the root node_modules.
sed -i 's|"dependencies":|"workspaces": { "nohoist": ["**"] }, "dependencies":|g' apps/target-app/package.json
# Run `yarn install` twice to workaround https://github.com/yarnpkg/yarn/issues/6988
yarn || yarn
# Find all linked node_modules and dereference them so that there are no broken
# symlinks if the target-app is copied somewhere. (Don't use
# `cp -rL apps/target-app some/destination` because then it also dereferences
# node_modules/.bin/* and thus breaks them.)
cd apps/target-app/node_modules
for f in $(find . -maxdepth 1 -type l)
do
l=$(readlink -f $f) && rm $f && cp -rf $l $f
done
Now apps/target-app can be copied and used as a standalone app.
I would not recommend it for production. It is slow (because it installs dependencies for the whole monorepo) and not really reliable (because there may be additional issues with symlinks).
You may try
yarn workspace @my-monorepo/my-package-in-workspace install -- --prod

Grunt and NPM, package all production dependencies

I am unsure when the way npm installs dependencies changed.
In the past I remember that if my package.json had a dependency on "abc", which in turn depended on "xyz", an npm install would result in something like:
package.json
node_modules/
  abc/
    node_modules/
      xyz/
      some-dev-dep/
When packaging my node project to be used by AWS Lambda, I would have to include that node_modules structure (less any dev-dependencies that were there). I would use Grunt for my packaging, so I wrote this handy thing to help me get all production dependencies into the zip (an extract from my gruntfile.js):
function getDependencies(pkg) {
  return Object.keys(pkg.dependencies)
    .map(function(val) { return val + '/**'; });
}

var config = {
  compress: {
    prod: {
      options: {
        archive: 'public/lambda.zip'
      },
      files: [
        { src: 'index.js', dest: '/' },
        { expand: true, cwd: 'node_modules/', src: getDependencies(pkg), dest: '/node_modules' }
      ]
    }
  }
};
This would work because dependencies of my dependencies were nested.
Recently (or maybe not so recently) this has changed (I am unsure when, as I was using a very old version of npm and only updated it recently).
Now if I depend on "abc" which in turn depends on "xyz" I will get:
node_modules/
  abc/
  xyz/
  some-dev-dep/
As you can see, my way of getting only production dependencies just won't work.
Is there any easy way to get only the list of production dependencies (together with their sub-dependencies) within a Grunt job?
I could do it using a recursive function that scans my dependencies, then checks the package.json files of those and searches for their sub-dependencies, and so on. That approach seems like a lot of hassle for what is probably a common scenario for many projects...
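For illustration, that recursive walk would be something like this rough, untested sketch (assuming the flat npm 3 node_modules layout shown above):
// Rough sketch: collect production dependency names by recursively reading
// each dependency's package.json from the flat node_modules folder.
var fs = require('fs');
var path = require('path');

function collectProdDeps(pkgJsonPath, found) {
  found = found || {};
  var pkg = JSON.parse(fs.readFileSync(pkgJsonPath, 'utf8'));
  Object.keys(pkg.dependencies || {}).forEach(function (name) {
    if (found[name]) return;              // already visited
    found[name] = true;
    var depPkgJson = path.join('node_modules', name, 'package.json');
    if (fs.existsSync(depPkgJson)) {
      collectProdDeps(depPkgJson, found); // recurse into sub-dependencies
    }
  });
  return Object.keys(found);
}

// e.g. getDependencies would become: collectProdDeps('package.json').map(function (n) { return n + '/**'; })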
Here is a function that returns an array of the production dependency module names. (Note: you might need to have the 'npm' module installed locally in your project for this to work.)
/**
 * Returns an array of the node dependencies needed for production.
 * See https://docs.npmjs.com/cli/ls for info on the 'npm ls' command.
 */
var getProdDependencies = function(callback) {
  require('child_process').exec('npm ls --prod=true --parseable=true', undefined,
    function(err, stdout, stderr) {
      var array = stdout.split('\n');
      var nodeModuleNames = [];
      array.forEach(function(line) {
        var index = line.indexOf('node_modules');
        if (index > -1) {
          // 13 = 'node_modules'.length + 1 for the path separator
          nodeModuleNames.push(line.substr(index + 13));
        }
      });
      callback(nodeModuleNames);
    });
};
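If you want to stay inside Grunt, you could then feed these names into the compress:prod target from the question with a wrapper task along these lines (just a sketch inside gruntfile.js; the task name package-prod is made up):
// Hypothetical wrapper task: resolve production deps, then run compress:prod
grunt.registerTask('package-prod', function () {
  var done = this.async(); // npm ls runs asynchronously
  getProdDependencies(function (nodeModuleNames) {
    var prodGlobs = nodeModuleNames.map(function (name) { return name + '/**'; });
    grunt.config('compress.prod.files', [
      { src: 'index.js', dest: '/' },
      { expand: true, cwd: 'node_modules/', src: prodGlobs, dest: '/node_modules' }
    ]);
    grunt.task.run('compress:prod');
    done();
  });
});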
This change was introduced with the release of npm 3 (see npm v3 Dependency Resolution).
It's not exactly clear why you need to use Grunt at all. If what you want to do is get only production dependencies you can simply run:
npm install --production
With the --production flag, all dev dependencies will be ignored. The same is also true if the NODE_ENV environment variable is set to 'production'.

How to quickly check whether to run npm install - node.js

The following script goes through all folders and installs dependencies:
var fs = require( "fs" ),
    path = require( "path" ),
    child_process = require( "child_process" );

var rootPath = "./";

var dirs = fs.readdirSync( rootPath )
  .filter( function( dir ) {
    return fs.statSync( path.join( rootPath, dir )).isDirectory();
  });

var install = function()
{
  if ( dirs.length === 0 )
    return;
  var dir = dirs.shift();
  console.log( "installing dependencies for : '" + dir + "'" );
  child_process.exec( "npm prune --production | npm install", {
    cwd: rootPath + dir
  }, install );
};

install();
How to run npm install command only if package.json exists in folder?
Try this command:
ls | grep package.json && (npm prune --production | npm install)
I assume you are running this on Linux.
In theory, if I remember correctly, the output of the ls command will be piped to grep, and only if grep finds a match will the commands (npm prune --production | npm install) be executed.
This is not tested by me at the moment of writing this, since I don't have a Linux box right now, but I hope it works.
UPDATE:
The efficient command, as per Dan's comment, would be:
test -f package.json && (npm prune --production | npm install)
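Since the surrounding script is already Node, another option (my suggestion, not from the answers above) is to skip the shell check entirely and guard with fs inside the question's install() function:
// Sketch: the question's install() with a package.json guard added.
var install = function () {
  if ( dirs.length === 0 )
    return;
  var dir = dirs.shift();
  var packageJsonPath = path.join( rootPath, dir, "package.json" );
  if ( !fs.existsSync( packageJsonPath ) ) {
    console.log( "skipping '" + dir + "': no package.json" );
    return install(); // move straight on to the next folder
  }
  console.log( "installing dependencies for : '" + dir + "'" );
  child_process.exec( "npm prune --production | npm install", {
    cwd: rootPath + dir
  }, install );
};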
