How do I npm install for current directory and also subdirectories with package.json files? - node.js

I have an application which is a web game server. Say, for example, I have node_modules which I use in directory ./ along with a proper package.json for them. It happens that in directory ./public/ I serve a website which itself uses node_modules and has a proper package.json of its own.
I know I can do this by navigating the directories. But is there a command or way to automate this so that it is easier for other developers to bootstrap the application on their systems?

Assuming you are on Linux/OSX, you could try something like this:
find ./apps/* -maxdepth 1 -name package.json -execdir npm install \;
Arguments:
./apps/* - the path to search. I would advise being very specific here to avoid it picking up package.json files in other node_modules directories (see maxdepth below).
-maxdepth 1 - Only traverse a depth of 1 (i.e. the current directory - don't go into subdirectories) in the search path
-name package.json - the filename to match in the search
-execdir npm install \; - for each result in the search, run npm install in the directory that holds the file (in this case package.json). Note that the backslash escaping the semicolon has to be escaped itself in the JSON file.
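For the layout in the question (a second package.json in ./public), the path argument would simply be ./public:
find ./public -maxdepth 1 -name package.json -execdir npm install \;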
Put this in the postinstall hook in your root package.json and it will run every time you do an npm install:
"scripts": {
    "postinstall": "find ./apps/* -maxdepth 1 -name package.json -execdir npm install \\;"
}

For cross-platform support (incl. Windows) you may try my solution.
Pure Node.js
Run it as a "preinstall" npm script
const path = require('path')
const fs = require('fs')
const child_process = require('child_process')

const root = process.cwd()
npm_install_recursive(root)

function npm_install_recursive(folder)
{
    const has_package_json = fs.existsSync(path.join(folder, 'package.json'))

    // Only descend into folders that have a package.json of their own,
    // or into a designated container of packages ('code' here;
    // rename this to match your own folder layout).
    if (!has_package_json && path.basename(folder) !== 'code')
    {
        return
    }

    // Since this script is intended to be run as a "preinstall" command,
    // skip the root folder, because it will be `npm install`ed in the end.
    if (has_package_json && folder !== root)
    {
        console.log('===================================================================')
        console.log(`Performing "npm install" inside ./${path.relative(root, folder)}`)
        console.log('===================================================================')
        npm_install(folder)
    }

    for (let subfolder of subfolders(folder))
    {
        npm_install_recursive(subfolder)
    }
}

function npm_install(where)
{
    child_process.execSync('npm install', { cwd: where, env: process.env, stdio: 'inherit' })
}

function subfolders(folder)
{
    return fs.readdirSync(folder)
        .filter(subfolder => fs.statSync(path.join(folder, subfolder)).isDirectory())
        .filter(subfolder => subfolder !== 'node_modules' && subfolder[0] !== '.')
        .map(subfolder => path.join(folder, subfolder))
}
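To wire it up, assuming the script above is saved as install-recursive.js next to the root package.json (the file name is an arbitrary choice):
"scripts": {
    "preinstall": "node install-recursive.js"
}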

Related

Run another yarn/npm task within a package.json, without specifying yarn or npm

I have a task in my package.json "deploy", which needs to first call "build". I have specified it like this:
"deploy": "yarn run build; ./deploy.sh",
The problem is that this hard codes yarn as the package manager. So if someone doesn't use yarn, it doesn't work. Switching to npm causes a similar issue.
What's a good way to achieve this while remaining agnostic to the choice of npm or yarn?
One simple approach is to use the npm-run-all package, whose documentation states:
Yarn Compatibility
If a script is invoked with Yarn, npm-run-all will correctly use Yarn to execute the plan's child scripts.
So you can do this:
"predeploy": "run-s build",
"deploy": "./deploy.sh",
And the predeploy step will use either npm or yarn depending on how you invoked the deploy task.
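With those scripts in place, both of the following invocations should run the build with whichever package manager started them:
npm run deploy
yarn deploy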
I think it is good for the scripts in package.json to remain package-manager agnostic, so they aren't tied to npm or yarn specifically, but within a project it is probably prudent to agree on a single package manager so that you're not dealing with conflicting lockfiles.
It's probably not ideal, but you could run a .js file at your project root to make these checks...
You could create a file at your project root called yarnpm.js (or whatever), and call said file in your package.json deploy command:
// package.json (trimmed)
"scripts": {
    "deploy": "node yarnpm",
    "build": "whatever build command you use"
},

// yarnpm.js
const fs = require('fs');

const FILE_NAME = process.argv[1].replace(/^.*[\\\/]/, '');

// Command you wish to run, with `{{}}` in place of `npm` or `yarn`.
// This lets you easily run multiple `npm`/`yarn` commands without much work,
// for example: `{{}} run one && {{}} run two`
// (split/join below substitutes every `{{}}` occurrence, unlike
// String.replace with a string pattern, which only replaces the first.)
const COMMAND_TO_RUN = '{{}} run build; ./deploy.sh';

try {
    if (fs.existsSync('./package-lock.json')) { // Check for `npm`
        execute(COMMAND_TO_RUN.split('{{}}').join('npm'));
    } else if (fs.existsSync('./yarn.lock')) { // Check for `yarn`
        execute(COMMAND_TO_RUN.split('{{}}').join('yarn'));
    } else {
        console.log('\x1b[33m', `[${FILE_NAME}] Unable to locate either npm or yarn!`, '\x1b[0m');
    }
} catch (err) {
    console.log('\x1b[31m', `[${FILE_NAME}] Unable to deploy!`, '\x1b[0m');
}

function execute(command) { // Helper function, to make running `exec` easier
    require('child_process').exec(command,
        (error, stdout, stderr) => {
            if (error) {
                console.log(`error: ${error.message}`);
                return;
            }
            if (stderr) {
                console.log(`stderr: ${stderr}`);
                return;
            }
            console.log(stdout);
        });
}
Hope this helps in some way! Cheers.
EDIT:
...or if you wanted to parameterize the yarnpm.js script, to make it easily reusable, and to keep all "commands" inside the package.json file, you could do something like this:
// package.json (trimmed, parameterized)
"scripts": {
    "deploy": "node yarnpm '{{}} run build; ./deploy.sh'",
    "build": "node build.js"
},

// yarnpm.js (parameterized)
const COMMAND_TO_RUN = process.argv[2]; // Technically, the first 'parameter' is the third index
const FILE_NAME = process.argv[1].replace(/^.*[\\\/]/, '');

if (COMMAND_TO_RUN) {
    const fs = require('fs');
    try {
        if (fs.existsSync('./package-lock.json')) { // Check for `npm`
            execute(COMMAND_TO_RUN.split('{{}}').join('npm'));
        } else if (fs.existsSync('./yarn.lock')) { // Check for `yarn`
            execute(COMMAND_TO_RUN.split('{{}}').join('yarn'));
        } else {
            console.log('\x1b[33m', `[${FILE_NAME}] Unable to locate either npm or yarn!`, '\x1b[0m');
        }
    } catch (err) {
        console.log('\x1b[31m', `[${FILE_NAME}] Unable to deploy!`, '\x1b[0m');
    }

    function execute(command) { // Helper function, to make running `exec` easier
        require('child_process').exec(command,
            (error, stdout, stderr) => {
                if (error) {
                    console.log(`error: ${error.message}`);
                    return;
                }
                if (stderr) {
                    console.log(`stderr: ${stderr}`);
                    return;
                }
                console.log(stdout);
            });
    }
} else {
    console.log('\x1b[31m', `[${FILE_NAME}] Requires a single argument!`, '\x1b[0m');
}
What about checking before you run?
You can create a new file called build.sh with the content below:
# Check whether the current user has the node environment installed;
# if not, install it automatically.
if command -v node >/dev/null 2>&1; then
    echo "version of node: $(node -v)"
    echo "version of npm: $(npm -v)"
else
    # Auto-install the node environment. This assumes CentOS;
    # change this part for other platforms.
    curl --silent --location https://rpm.nodesource.com/setup_12.x | sudo bash -
    yum -y install nodejs
fi
npm run build
Then your script will be:
{
    "deploy": "./build.sh && ./deploy.sh"
}
So I think I have a much simpler solution:
"deploy": "yarn run build || npm run build; ./deploy.sh",
Its only real downside: if yarn exists but the build fails, npm run build will then run as well.
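If that fall-through matters, a lockfile check avoids it (a sketch, assuming a POSIX shell and that the project commits only one kind of lockfile):
"deploy": "if [ -f yarn.lock ]; then yarn run build; else npm run build; fi && ./deploy.sh"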

Yarn install production dependencies of a single package in workspace

I'm trying to install the production dependencies only for a single package in my workspace. Is that possible?
I've already tried this:
yarn workspace my-package-in-workspace install -- --prod
But it is installing all production dependencies of all my packages.
yarn 1 doesn't support it as far as I know.
If you are trying to install a specific package in a Dockerfile, there is a workaround:
copy the yarn.lock file and the root package.json
copy only the package.json files you need: your package's and those of any other packages in the monorepo that your package depends on
in the Dockerfile, manually remove all the devDependencies from each package.json that you copied
run yarn install on the root package.json
Note:
Deterministic installation: doing this in monorepos is recommended to force a deterministic install; see https://stackoverflow.com/a/64503207/806963
Full Dockerfile example:
FROM node:12
WORKDIR /usr/project
COPY yarn.lock package.json remove-all-dev-deps-from-all-package-jsons.js change-version.js ./
ARG package_path=packages/dancer-placing-manager
COPY ${package_path}/package.json ./${package_path}/package.json
RUN node remove-all-dev-deps-from-all-package-jsons.js && rm remove-all-dev-deps-from-all-package-jsons.js
RUN yarn install --frozen-lockfile --production
COPY ${package_path}/dist/src ./${package_path}/dist/src
COPY ${package_path}/src ./${package_path}/src
CMD node --unhandled-rejections=strict ./packages/dancer-placing-manager/dist/src/index.js
remove-all-dev-deps-from-all-package-jsons.js:
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')

async function deleteDevDeps(packageJsonPath) {
    const packageJson = require(packageJsonPath)
    delete packageJson.devDependencies
    await new Promise((res, rej) =>
        fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2), 'utf-8', error => (error ? rej(error) : res())),
    )
}

function getSubPackagesPaths(repoPath) {
    const result = execSync(`yarn workspaces --json info`).toString()
    const workspacesInfo = JSON.parse(JSON.parse(result).data)
    return Object.values(workspacesInfo)
        .map(workspaceInfo => workspaceInfo.location)
        .map(packagePath => path.join(repoPath, packagePath, 'package.json'))
}

async function main() {
    const repoPath = __dirname
    const packageJsonPath = path.join(repoPath, 'package.json')
    await deleteDevDeps(packageJsonPath)
    await Promise.all(getSubPackagesPaths(repoPath).map(packageJsonPath => deleteDevDeps(packageJsonPath)))
}

if (require.main === module) {
    main()
}
It looks like this is easily possible now with Yarn 2: https://yarnpkg.com/cli/workspaces/focus
But I haven't tried myself.
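For reference, a focused production install with Yarn 2+ looks roughly like this (untested, per the above; workspace-tools ships as a separate plugin, and the packages/ path is just an assumed layout):
yarn plugin import workspace-tools
cd packages/my-package-in-workspace
yarn workspaces focus --production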
Here is my solution for Yarn 1:
# Install dependencies for the whole monorepo because
# 1. The --ignore-workspaces flag is not implemented https://github.com/yarnpkg/yarn/issues/4099
# 2. The --focus flag is broken https://github.com/yarnpkg/yarn/issues/6715
# Avoid the target workspace dependencies to land in the root node_modules.
sed -i 's|"dependencies":|"workspaces": { "nohoist": ["**"] }, "dependencies":|g' apps/target-app/package.json
# Run `yarn install` twice to workaround https://github.com/yarnpkg/yarn/issues/6988
yarn || yarn
# Find all linked node_modules and dereference them so that there are no broken
# symlinks if the target-app is copied somewhere. (Don't use
# `cp -rL apps/target-app some/destination` because then it also dereferences
# node_modules/.bin/* and thus breaks them.)
cd apps/target-app/node_modules
for f in $(find . -maxdepth 1 -type l)
do
l=$(readlink -f $f) && rm $f && cp -rf $l $f
done
Now apps/target-app can be copied and used as a standalone app.
I would not recommend it for production. It is slow (because it installs dependencies for the whole monorepo) and not really reliable (because there may be additional issues with symlinks).
You may try
yarn workspace @my-monorepo/my-package-in-workspace install -- --prod

Grunt and NPM, package all production dependencies

I am unsure when the way npm installs dependencies changed.
In the past, I remember that if my package.json had a dependency on "abc", which in turn depended on "xyz", npm install would result in something like:
package.json
node_modules/
    abc/
        node_modules/
            xyz/
            some-dev-dep/
When packaging my node project to be used by AWS Lambda, I would have to include that node_modules structure (less any dev-dependencies that were there). I would use Grunt for my packaging, so I wrote this handy thing to help me get all production dependencies into the zip (an extract from my gruntfile.js):
function getDependencies(pkg) {
    return Object.keys(pkg.dependencies)
        .map(function(val) { return val + '/**'; });
}

var config = {
    compress: {
        prod: {
            options: {
                archive: 'public/lambda.zip'
            },
            files: [
                { src: 'index.js', dest: '/' },
                { expand: true, cwd: 'node_modules/', src: getDependencies(pkg), dest: '/node_modules' }
            ]
        }
    }
};
This would work because dependencies of my dependencies were nested.
Recently (or maybe not so recently) this has changed (I am unsure when, as I was using a very old version of npm and updated it recently).
Now if I depend on "abc" which in turn depends on "xyz" I will get:
node_modules/
    abc/
    xyz/
    some-dev-dep/
As you can see, my way of getting only production dependencies just won't work.
Is there any easy way to get only the list of production dependencies (together with their sub-dependencies) within a grunt job?
I could do it with a recursive function that scans for my dependencies, then checks the package.json files of those and searches for their sub-dependencies, and so on. That approach seems like a lot of hassle for what is probably a common scenario for many projects...
Here is a function that returns an array of the production dependency module names. (Note: you might need to have the 'npm' module installed locally in your project for this to work.)
/**
 * Returns an array of the node dependencies needed for production.
 * See https://docs.npmjs.com/cli/ls for info on the 'npm ls' command.
 */
var getProdDependencies = function(callback) {
    require('child_process').exec('npm ls --prod=true --parseable=true', undefined,
        function(err, stdout, stderr) {
            var array = stdout.split('\n');
            var nodeModuleNames = [];
            array.forEach(function(line) {
                var index = line.indexOf('node_modules');
                if (index > -1) {
                    // 13 === 'node_modules/'.length
                    nodeModuleNames.push(line.substr(index + 13));
                }
            });
            callback(nodeModuleNames);
        });
};
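A quick usage sketch (the names come back as paths relative to node_modules/):
getProdDependencies(function(names) {
    console.log(names); // e.g. [ 'abc', 'xyz' ]
});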
This change was introduced with the release of npm 3 (see npm v3 Dependency Resolution).
It's not exactly clear why you need to use Grunt at all. If what you want to do is get only production dependencies you can simply run:
npm install --production
With the --production flag, all dev dependencies will be ignored. The same is also true if the NODE_ENV environment variable is set to 'production'.

How to use environment variables in package.json

Because we don't want sensitive data in the project code, including the package.json file, using environment variables would be a logical choice in my opinion.
Example package.json:
"dependencies": {
    "accounting": "~0.4.0",
    "async": "~1.4.2",
    "my-private-module": "git+https://${BB_USER}:${BB_PASS}@bitbucket.org/foo/bar.git"
}
Is this possible?
The question is not if this is wise or not good, just if it's possible.
In case you use a .env file, you can use grep or eval to read an environment variable's value from it.
Updated start2 as @Paul suggested:
"scripts": {
    "start": "NODE_ENV=$(grep NODE_ENV .env | cut -d '=' -f2) some_script",
    "start2": "eval $(grep '^NODE_ENV' .env) && some_script"
}
I have a similar but different requirement: I want to use environment variables in the scripts.
Instead of using the environment variables directly in package.json, I do:
"some-script": "./scripts/some-script.sh",
And in some-script.sh:
#!/bin/sh
npm run some-other-script -- --prop=$SOME_ENV_VAR
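An invocation then looks like this (the variable and script names are the ones from the snippet above):
SOME_ENV_VAR=some-value npm run some-script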
Here's how I managed to work around package.json to achieve the same purpose. It uses a script that reads from a custom section of package.json for URL modules, interpolates environment variables in them, and installs them with npm install --no-save (the --no-save could be omitted, depending on the use case).
As a bonus: it tries to read the env variable from .env.json, which can be gitignored and is very useful for development.
Create a script that will read from a custom section of package.json
env-dependencies.js
const execSync = require('child_process').execSync
const pkg = require('./package.json')

if (!pkg.envDependencies) {
    return process.exit(0)
}

let env = Object.assign({}, process.env)

if (typeof pkg.envDependencies.localJSON === 'string') {
    try {
        Object.assign(env, require(pkg.envDependencies.localJSON))
    } catch (err) {
        console.log(`Could not read or parse pkg.envDependencies.localJSON. Proceeding with env only.`)
    }
}

if (typeof pkg.envDependencies.urls === 'undefined') {
    console.log(`pkg.envDependencies.urls not found or empty. Passing.`)
    process.exit(0)
}

if (
    !Array.isArray(pkg.envDependencies.urls) ||
    !(pkg.envDependencies.urls.every(url => typeof url === 'string'))
) {
    throw new Error(`pkg.envDependencies.urls should have a signature of String[]`)
}

const parsed = pkg.envDependencies.urls
    .map(url => url.replace(/\${([0-9a-zA-Z_]*)}/g, (_, varName) => {
        if (typeof env[varName] === 'string') {
            return env[varName]
        } else {
            throw new Error(`Could not read env variable ${varName} in url ${url}`)
        }
    }))
    .join(' ')

try {
    execSync('npm install --no-save ' + parsed, { stdio: [0, 1, 2] })
    process.exit(0)
} catch (err) {
    throw new Error('Could not install pkg.envDependencies. Are you sure the remote URLs all have a package.json?')
}
Add a "postinstall": "node env-dependencies.js" to your package.json, that way it will be run on every npm install
Add your private git repos to package.json using the URLs you want (note: they all must have a package.json at root!):
"envDependencies": {
    "localJSON": "./.env.json",
    "urls": [
        "git+https://${GITHUB_PERSONAL_ACCESS_TOKEN}@github.com/user/repo#semver:^2.0.0"
    ]
},
(the semver specifier #semver:^2.0.0 can be omitted, but it refers to a git tag, which can be very useful, as it makes your git server a fully-fledged package manager)
npm install
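For development, the gitignored .env.json referenced above could look like this (the token value is a placeholder):
{
    "GITHUB_PERSONAL_ACCESS_TOKEN": "<your-token-here>"
}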
No, it's not possible. You should access the repo using git+ssh, and store a private key in ~/.ssh.
Your line then looks like:
"my-private-module": "git+ssh://git@bitbucket.org/foo/bar.git"
Which doesn't contain anything sensitive.
No, it isn't possible, as npm does not treat any string values as templates.
It may be better to just use git+ssh (if your provider supports it) with an ssh agent.
You can use environment variables to inject values into your package.json like this:
Any environment variables that start with npm_config_ will be interpreted as a configuration parameter. For example, putting npm_config_foo=bar in your environment will set the foo configuration parameter to bar. Any environment configurations that are not given a value will be given the value of true. Config values are case-insensitive, so NPM_CONFIG_FOO=bar will work the same.
https://docs.npmjs.com/misc/config#environment-variables
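For example (a sketch; the parameter name foo and the script name are made up):
"scripts": {
    "show-foo": "node -e \"console.log(process.env.npm_config_foo)\""
}
Running npm_config_foo=bar npm run show-foo prints bar, since npm exposes its configuration parameters to scripts as npm_config_* environment variables.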
I had the same need and my solution was based on @Long Nguyen's response. This way, I can rely only on what's defined in the .env file.
.env
...
SKIP_PREFLIGHT_CHECK=true
...
package.json
...
"scripts": {
    "test": "yarn cross-env $(grep SKIP_PREFLIGHT_CHECK ../../.env) react-app-rewired test --watchAll=false"
}
...
You can install the package https://www.npmjs.com/package/env-cmd
and all the variables from your .env file will be visible to your scripts,
e.g.:
./.env:
ENV1=THANKS
ENV2=FOR ALL
ENV3=THE FISH
package.json:
"scripts": {
    "test": "env-cmd pact-broker can-i-deploy --broker-token=${ENV1}"
}
or another example from your question:
"my-private-module": "env-cmd git+https://${BB_USER}:${BB_PASS}@bitbucket.org/foo/bar.git"
For complicated environment variables, you can use
https://stedolan.github.io/jq/
to access a JSON file (your env file, in this case).
The JSON file could be something like:
{
    "env": {
        "username": "1345345",
        "Groups": [],
        "arraytest": [
            {
                "yes": "1",
                "no": "0"
            }
        ]
    }
}
so the script could be something like this to access the yes value (assuming the JSON above is saved as env.json):
"scripts": {
    "yes": "jq '.env.arraytest[0].yes' env.json"
}
If you're running node inside a Docker container
Use Docker Compose to inject the env variable
app:
  environment:
    - NODE_ENV=staging
Run your package.json script from your Dockerfile
CMD [ "npm", "run", "start" ]
Use echo or printenv:
"scripts": {
    "start": "node -r dotenv/config app.js dotenv_config_path=/run/secrets/$(echo $NODE_ENV)"
}
or, using printenv instead of echo:
"scripts": {
    "start": "node -r dotenv/config app.js dotenv_config_path=/run/secrets/$(printenv NODE_ENV)"
}
Don't use this for sensitive env variables. It's a really good way to point to a Docker secrets file (like this example shows).

How to quickly check whether to run npm install - node.js

The following script goes through all folders and installs their dependencies:
var fs = require( "fs" ),
    path = require( "path" ),
    child_process = require( "child_process" );

var rootPath = "./";

var dirs = fs.readdirSync( rootPath )
    .filter( function( dir ) {
        return fs.statSync( path.join( rootPath, dir )).isDirectory();
    });

var install = function()
{
    if ( dirs.length === 0 )
        return;
    var dir = dirs.shift();
    console.log( "installing dependencies for : '" + dir + "'" );
    // Prune first, then install, inside each subfolder.
    child_process.exec( "npm prune --production && npm install", {
        cwd: rootPath + dir
    }, install );
};

install();
How can I run the npm install command only if a package.json exists in the folder?
Try this command:
ls | grep package.json && (npm prune --production && npm install)
I assume you are running this on Linux.
In theory, if I remember correctly, the output of the ls command will be piped to the grep command, and only if grep finds a result will the commands (npm prune --production && npm install) be executed.
This is not tested by me at the moment of writing this, since I don't have a Linux box right now, but I hope it works.
UPDATE:
The efficient command, as per Dan's comment, would be:
test -f package.json && (npm prune --production && npm install)
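A cross-platform variant of the same check can be done in Node itself (a sketch; run it from the folder you want to test):
const fs = require('fs')
const { execSync } = require('child_process')

// Only prune/install if this folder actually has a package.json.
if (fs.existsSync('./package.json')) {
    execSync('npm prune --production && npm install', { stdio: 'inherit' })
}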
