I bought the MaterialPro template from the WrapPixel website. After downloading the template package, I followed the getting-started installation steps from the documentation attached to the template, as follows:
Install Node.js From https://nodejs.org/en/download/
Open a terminal and navigate to material-pro/
Install npm: npm install --global npm@latest
Install yarn: npm install --global yarn
Install gulp: npm install --global gulp-cli
Copy gulp: gulp copy
The gulpfile.js inside the template root is like this:
//gulpfile.js
console.time("Loading plugins"); //start measuring
const gulp = require('gulp'),
    minifyCSS = require('gulp-clean-css'),
    uglify = require('gulp-uglify'),
    rename = require("gulp-rename"),
    sass = require('gulp-sass'),
    npmDist = require('gulp-npm-dist');
console.timeEnd('Loading plugins');

const sassFiles = 'src/assets/scss/*.scss',
    cssDest = 'dist/css/';

//compile scss into css
function style() {
    return gulp.src(sassFiles)
        .pipe(sass().on('error', sass.logError))
        .pipe(gulp.dest(cssDest));
}

//This is for the minify css
async function minifycss() {
    return gulp.src(['dist/css/*.css', '!dist/css/**/*.min.css'])
        .pipe(rename({
            suffix: '.min'
        }))
        .pipe(minifyCSS())
        .pipe(gulp.dest(cssDest));
}

// This is for the minifyjs
async function minifyjs() {
    return gulp.src(['dist/js/custom.js', 'dist/js/app.js', '!dist/js/custom.min.js', '!dist/js/app.min.js'])
        .pipe(rename({
            suffix: '.min'
        }))
        .pipe(uglify())
        .pipe(gulp.dest('dist/js'));
}

// Copy dependencies to ./public/libs/
async function copy() {
    gulp.src(npmDist(), {
        base: './node_modules'
    })
        .pipe(gulp.dest('./src/assets/libs'));
};

async function watch() {
    gulp.watch(['src/assets/scss/**/*.scss'], style);
    gulp.watch(['dist/css/style.css'], minifycss);
    gulp.watch(['dist/js/**/*.js', '!dist/js/**/*.min.js'], minifyjs);
}

gulp.task('default', watch);

exports.style = style;
exports.minifycss = minifycss;
exports.minifyjs = minifyjs;
exports.copy = copy;
exports.watch = watch;
After all that, I made some changes to the template's SCSS files and ran the gulp command. At this point, the gulp command has still not finished; it just keeps running, with output on the terminal like this:
Loading plugins: 539.410ms
[17:01:03] Using gulpfile ~/Documents/documentation/materialpro-bootstrap-latest/material-pro/gulpfile.js
[17:01:03] Starting 'default'...
[17:01:03] Finished 'default' after 18 ms
What is going wrong here? Please kindly help, thanks.
P.S.: Apologies if my question is incomplete or missing something; I will try to add more detail if suggested.
Your gulp code is fine. Make a change in your SCSS or JS files and you will see the tasks pick it up.
Explanation
Your default task is gulp.task('default', watch);
When you run gulp, it starts watching your SCSS, CSS, and JS files. Whenever there is a new change, it runs the corresponding task. That is why the command keeps running and never "finishes".
Suggestion: use it like this.
async function watch() {
    gulp.watch(['src/assets/scss/**/*.scss'], gulp.series(style, minifycss));
    gulp.watch(['dist/js/**/*.js', '!dist/js/**/*.min.js'], minifyjs);
}
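If you also want a full build to run before the watchers start, a minimal sketch (reusing the task functions already defined in the gulpfile above) is to export a series as the default task, in place of gulp.task('default', watch):

// Sketch: build everything once, then keep watching for changes
exports.default = gulp.series(style, minifycss, minifyjs, watch);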
I'm working on a project in which the package.json file is missing. The developer has pushed the package-lock.json file without the package.json file.
How can I create a clean package.json from the package-lock.json file in case it is at all possible?
It's not possible to generate a full package.json from package-lock.json, because the latter doesn't contain all the necessary data. It contains only a list of dependencies with specific versions, without the original semver ranges. Production and development dependencies are mixed together, along with nested dependencies.
A fresh package.json could be generated and then augmented with these dependencies with something like:
const fs = require('fs');
const packageLock = require('./package-lock.json');
const package = require('./package.json');

package.dependencies = Object.entries(packageLock.dependencies)
    .reduce((deps, [dep, { version }]) => Object.assign(deps, { [dep]: version }), {});

fs.writeFileSync('./package-new.json', JSON.stringify(package, null, 2));
Nested dependencies could be filtered out by checking the requires key, but this can also affect the project's own dependencies.
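For instance, a rough sketch of that filtering against the lockfile v1 format (where each entry's requires object lists the packages it depends on); as warned above, direct dependencies that are also required by other packages get dropped too:

// Sketch: treat every package that some other entry `requires` as a nested
// (transitive) dependency and keep only the remaining top-level entries.
const required = new Set();
for (const entry of Object.values(packageLock.dependencies)) {
    Object.keys(entry.requires || {}).forEach(name => required.add(name));
}

package.dependencies = Object.entries(packageLock.dependencies)
    .filter(([dep]) => !required.has(dep))
    .reduce((deps, [dep, { version }]) => Object.assign(deps, { [dep]: version }), {});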
Simply run npm init and it will pull all of the current dependencies from package-lock.json if you already have node_modules/ generated. If not, run npm ci to generate the node modules from the package-lock.json and then run npm init to generate the package.json file.
A slightly improved version of the accepted answer's script. It will pull the locked versions out of the package-lock.
const fs = require('fs');
const packageLock = require('./package-lock.json');
const package = require('./package.json');

package.dependencies = Object.keys(package.dependencies)
    .reduce((deps, dep) => Object.assign(deps, { [dep]: packageLock.dependencies[dep].version }), {});

package.devDependencies = Object.keys(package.devDependencies)
    .reduce((deps, dep) => Object.assign(deps, { [dep]: packageLock.dependencies[dep].version }), {});

fs.writeFileSync('./package-new.json', JSON.stringify(package, null, 2));
I have a directory structure like this: /a/b/c
Directory c contains package.json and should contain node_modules.
How can I execute npm install from inside directory a?
I tried npm --prefix b/c install b/c, but that way all the symlinks are created directly inside c instead of in the default node_modules/.bin.
Is there any way to achieve that?
node: 6.2.2
npm: 3.10.2
Using an npm preinstall hook in a package.json within your a directory is likely the best choice in this situation.
"scripts": {
    "preinstall": "cd b/c && npm install"
}
This way running npm install in directory a will also do the c directory install and provide a seamless dev experience.
A bit overkill, but it may be useful...
With the help of recursion you can find node_modules folders.
You could run this file in a parent directory to find node_modules in a child directory and pass npm arguments along.
Note: tested on Windows.
var child_process = require('child_process');
var fs = require('fs');
var path = require('path');

var safe = 0;
let args = process.argv.splice(2).toString().replace(/,/g, ' ');

function recurse(_path) {
    safe++;
    if (safe > 5000) {
        console.log('directory may be too large')
        return
    }
    if (/node_modules$/.test(_path)) {
        let cwd = path.resolve(__dirname, _path)
        console.log('found node_modules at ' + cwd)
        child_process.exec(`start cmd.exe /k npm ${args}`, { cwd })
        return
    }
    let directoryList = fs.readdirSync(_path);
    directoryList.forEach(function (nextDir) {
        if (fs.statSync(_path + '/' + nextDir).isFile()) {
            return
        }
        if (/^\./.test(nextDir)) { // skip folders beginning with .
            return
        }
        recurse(_path + '/' + nextDir);
    })
}

recurse('./')
What is the most correct way to install npm packages in nested sub folders?
my-app
  /my-sub-module
    package.json
  package.json
What is the best way to have the packages in /my-sub-module be installed automatically when npm install is run in my-app?
I prefer using postinstall, if you know the names of the nested subdirectories. In package.json:
"scripts": {
"postinstall": "cd nested_dir && npm install",
...
}
Per @Scott's answer, the install/postinstall script is the simplest way as long as the sub-directory names are known. This is how I run it for multiple sub-directories. For example, pretend we have api/, web/ and shared/ sub-projects in a monorepo root:
// In monorepo root package.json
{
  ...
  "scripts": {
    "postinstall": "(cd api && npm install); (cd web && npm install); (cd shared && npm install)"
  },
}
On Windows, replace the ; between the parentheses with &&.
// In monorepo root package.json
{
  ...
  "scripts": {
    "postinstall": "(cd api && npm install) && (cd web && npm install) && (cd shared && npm install)"
  },
}
Use Case 1: If you want to be able to run npm commands from within each subdirectory (where each package.json is), you will need to use postinstall.
As I often use npm-run-all anyway, I use it to keep it nice and short (the part in the postinstall):
{
  "scripts": {
    "install:demo": "cd projects/demo && npm install",
    "install:design": "cd projects/design && npm install",
    "install:utils": "cd projects/utils && npm install",
    "postinstall": "run-p install:*"
  }
}
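For example, with the scripts above, either of the following works:

npm install            # triggers postinstall, which installs all three projects in parallel
npm run install:demo   # installs only projects/demo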
This has the added benefit that I can install all at once, or individually. If you don't need this or don't want npm-run-all as a dependency, check out demisx's answer (using subshells in postinstall).
Use Case 2: If you will be running all npm commands from the root directory (and, for example, won't be using npm scripts in the subdirectories), you could simply install each subdirectory like you would any dependency:
npm install path/to/any/directory/with/a/package-json
In the latter case, don't be surprised that you don't find a node_modules folder or package-lock.json file in the sub-directories: all packages will be installed into the root node_modules, which is why you won't be able to run npm commands that require those dependencies from any of your subdirectories.
If you're not sure, use case 1 always works.
If you want to run a single command to install npm packages in nested subfolders, you can run a script via npm and the main package.json in your root directory. The script will visit every subdirectory and run npm install.
Below is a .js script that will achieve the desired result:
var fs = require('fs');
var resolve = require('path').resolve;
var join = require('path').join;
var cp = require('child_process');
var os = require('os');

// get library path
var lib = resolve(__dirname, '../lib/');

fs.readdirSync(lib).forEach(function (mod) {
    var modPath = join(lib, mod);

    // ensure path has package.json
    if (!fs.existsSync(join(modPath, 'package.json'))) {
        return;
    }

    // npm binary based on OS
    var npmCmd = os.platform().startsWith('win') ? 'npm.cmd' : 'npm';

    // install folder
    cp.spawn(npmCmd, ['i'], {
        env: process.env,
        cwd: modPath,
        stdio: 'inherit'
    });
})
Note that this is an example taken from a StrongLoop article that specifically addresses a modular node.js project structure (including nested components and package.json files).
As suggested, you could also achieve the same thing with a bash script.
EDIT: Made the code work in Windows
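As mentioned, a rough bash equivalent of the script above might look like this (a sketch that assumes the same ../lib/ layout and is run from the script's location):

# For every direct subfolder of ../lib that has a package.json, run npm install there
for dir in ../lib/*/; do
  [ -f "$dir/package.json" ] && (cd "$dir" && npm install)
done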
Just for reference, in case people come across this question. You can now:
Add a package.json to a subfolder
Install this subfolder as a reference link in the main package.json:
npm install --save path/to/my/subfolder
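npm records the subfolder as a file: dependency in the main package.json, which will look something like this (hypothetical package name and path):

{
  "dependencies": {
    "my-subfolder": "file:path/to/my/subfolder"
  }
}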
The accepted answer works, but you can use --prefix to run npm commands in a selected location.
"postinstall": "npm --prefix ./nested_dir install"
And --prefix works for any npm command, not just install.
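For example (sketches; the build script name is just an assumption about what the nested package defines):

npm --prefix ./nested_dir run build
npm --prefix ./nested_dir test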
You can also view the current prefix with
npm prefix
And set your global install (-g) folder with
npm config set prefix "folder_path"
Maybe TMI, but you get the idea...
My solution is very similar.
Pure Node.js
The following script examines all subfolders (recursively) as long as they have package.json and runs npm install in each of them.
One can add exceptions to it: folders that are allowed not to have a package.json. In the example below, one such folder is "packages".
One can run it as a "preinstall" script.
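For example, wired up as a "preinstall" script in the root package.json (the script file name here is just a placeholder):

{
  "scripts": {
    "preinstall": "node ./npm-install-recursive.js"
  }
}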
const path = require('path')
const fs = require('fs')
const child_process = require('child_process')
const root = process.cwd()
npm_install_recursive(root)
// Since this script is intended to be run as a "preinstall" command,
// it will do `npm install` automatically inside the root folder in the end.
console.log('===================================================================')
console.log(`Performing "npm install" inside root folder`)
console.log('===================================================================')
// Recurses into a folder
function npm_install_recursive(folder)
{
const has_package_json = fs.existsSync(path.join(folder, 'package.json'))
// Abort if there's no `package.json` in this folder and it's not a "packages" folder
if (!has_package_json && path.basename(folder) !== 'packages')
{
return
}
// If there is `package.json` in this folder then perform `npm install`.
//
// Since this script is intended to be run as a "preinstall" command,
// skip the root folder, because it will be `npm install`ed in the end.
// Hence the `folder !== root` condition.
//
if (has_package_json && folder !== root)
{
console.log('===================================================================')
console.log(`Performing "npm install" inside ${folder === root ? 'root folder' : './' + path.relative(root, folder)}`)
console.log('===================================================================')
npm_install(folder)
}
// Recurse into subfolders
for (let subfolder of subfolders(folder))
{
npm_install_recursive(subfolder)
}
}
// Performs `npm install`
function npm_install(where)
{
child_process.execSync('npm install', { cwd: where, env: process.env, stdio: 'inherit' })
}
// Lists subfolders in a folder
function subfolders(folder)
{
return fs.readdirSync(folder)
.filter(subfolder => fs.statSync(path.join(folder, subfolder)).isDirectory())
.filter(subfolder => subfolder !== 'node_modules' && subfolder[0] !== '.')
.map(subfolder => path.join(folder, subfolder))
}
If you have find utility on your system, you could try running the following command in your application root directory:
find . ! -path "*/node_modules/*" -name "package.json" -execdir npm install \;
Basically, find all package.json files and run npm install in that directory, skipping all node_modules directories.
EDIT As mentioned by fgblomqvist in comments, npm now supports workspaces too.
Some of the answers are quite old. I think nowadays we have some new options available to set up monorepos.
I would suggest using yarn workspaces:
Workspaces are a new way to set up your package architecture that’s available by default starting from Yarn 1.0. It allows you to setup multiple packages in such a way that you only need to run yarn install once to install all of them in a single pass.
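A minimal root package.json for such a setup might look like the following sketch (the packages/* glob is just a common convention, not a requirement):

{
  "private": true,
  "workspaces": ["packages/*"]
}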
If you prefer or have to stay with npm, I suggest taking a look at lerna:
Lerna is a tool that optimizes the workflow around managing multi-package repositories with git and npm.
Lerna works perfectly with yarn workspaces too - article. I've just finished setting up a monorepo project - example.
And here is an example of a multi-package project configured to use npm + lerna - MDC Web: they run lerna bootstrap using package.json's postinstall.
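The postinstall wiring for that kind of npm + lerna setup looks roughly like this (a sketch, not MDC Web's exact file):

{
  "scripts": {
    "postinstall": "lerna bootstrap"
  }
}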
Adding Windows support to snozza's answer, as well as skipping the node_modules folder if present.

var fs = require('fs')
var resolve = require('path').resolve
var join = require('path').join
var cp = require('child_process')

// get library path
var lib = resolve(__dirname, '../lib/')

fs.readdirSync(lib)
  .forEach(function (mod) {
    var modPath = join(lib, mod)

    // skip node_modules and ensure the path has a package.json
    if (mod === 'node_modules' || !fs.existsSync(join(modPath, 'package.json'))) return

    // Determine OS and set command accordingly
    const cmd = /^win/.test(process.platform) ? 'npm.cmd' : 'npm'

    // install folder
    cp.spawn(cmd, ['i'], { env: process.env, cwd: modPath, stdio: 'inherit' })
  })
Inspired by the scripts provided here, I built a configurable example which:
can be set up to use yarn or npm
can be set up to determine the command to use based on lock files, so that if you set it to use yarn but a directory only has a package-lock.json, it will use npm for that directory (defaults to true)
configurable logging
runs installations in parallel using cp.spawn
can do dry runs to let you see what it would do first
can be run as a function or auto run using env vars
when run as a function, optionally provide array of directories to check
returns a promise that resolves when completed
allows setting a max depth to search if needed
knows to stop recursing if it finds a folder with yarn workspaces (configurable)
allows skipping directories using a comma separated env var or by passing the config an array of strings to match against or a function which receives the file name, file path, and the fs.Dirent obj and expects a boolean result.
const path = require('path');
const { promises: fs } = require('fs');
const cp = require('child_process');
// if you want to have it automatically run based upon
// process.cwd()
const AUTO_RUN = Boolean(process.env.RI_AUTO_RUN);
/**
* Creates a config object from environment variables which can then be
* overridden if executing via its exported function (config as second arg)
*/
const getConfig = (config = {}) => ({
// we want to use yarn by default but RI_USE_YARN=false will
// use npm instead
useYarn: process.env.RI_USE_YARN !== 'false',
// should we handle yarn workspaces? if this is true (default)
// then we will stop recursing if a package.json has the "workspaces"
// property and we will allow `yarn` to do its thing.
yarnWorkspaces: process.env.RI_YARN_WORKSPACES !== 'false',
// if truthy, will run extra checks to see if there is a package-lock.json
// or yarn.lock file in a given directory and use that installer if so.
detectLockFiles: process.env.RI_DETECT_LOCK_FILES !== 'false',
// what kind of logging should be done on the spawned processes?
// if this exists and it is not errors it will log everything
// otherwise it will only log stderr and spawn errors
log: process.env.RI_LOG || 'errors',
// max depth to recurse?
maxDepth: process.env.RI_MAX_DEPTH || Infinity,
// do not install at the root directory?
ignoreRoot: Boolean(process.env.RI_IGNORE_ROOT),
// an array (or comma separated string for env var) of directories
// to skip while recursing. if array, can pass functions which
// return a boolean after receiving the dir path and fs.Dirent args
// #see https://nodejs.org/api/fs.html#fs_class_fs_dirent
skipDirectories: process.env.RI_SKIP_DIRS
? process.env.RI_SKIP_DIRS.split(',').map(str => str.trim())
: undefined,
// just run through and log the actions that would be taken?
dry: Boolean(process.env.RI_DRY_RUN),
...config
});
function handleSpawnedProcess(dir, log, proc) {
return new Promise((resolve, reject) => {
proc.on('error', error => {
console.log(`
----------------
[RI] | [ERROR] | Failed to Spawn Process
- Path: ${dir}
- Reason: ${error.message}
----------------
`);
reject(error);
});
if (log) {
proc.stderr.on('data', data => {
console.error(`[RI] | [${dir}] | ${data}`);
});
}
if (log && log !== 'errors') {
proc.stdout.on('data', data => {
console.log(`[RI] | [${dir}] | ${data}`);
});
}
proc.on('close', code => {
if (log && log !== 'errors') {
console.log(`
----------------
[RI] | [COMPLETE] | Spawned Process Closed
- Path: ${dir}
- Code: ${code}
----------------
`);
}
if (code === 0) {
resolve();
} else {
reject(
new Error(
`[RI] | [ERROR] | [${dir}] | failed to install with exit code ${code}`
)
);
}
});
});
}
async function recurseDirectory(rootDir, config) {
const {
useYarn,
yarnWorkspaces,
detectLockFiles,
log,
maxDepth,
ignoreRoot,
skipDirectories,
dry
} = config;
const installPromises = [];
function install(cmd, folder, relativeDir) {
const proc = cp.spawn(cmd, ['install'], {
cwd: folder,
env: process.env
});
installPromises.push(handleSpawnedProcess(relativeDir, log, proc));
}
function shouldSkipFile(filePath, file) {
if (!file.isDirectory() || file.name === 'node_modules') {
return true;
}
if (!skipDirectories) {
return false;
}
return skipDirectories.some(check =>
typeof check === 'function' ? check(filePath, file) : check === file.name
);
}
async function getInstallCommand(folder) {
let cmd = useYarn ? 'yarn' : 'npm';
if (detectLockFiles) {
const [hasYarnLock, hasPackageLock] = await Promise.all([
fs
.readFile(path.join(folder, 'yarn.lock'))
.then(() => true)
.catch(() => false),
fs
.readFile(path.join(folder, 'package-lock.json'))
.then(() => true)
.catch(() => false)
]);
if (cmd === 'yarn' && !hasYarnLock && hasPackageLock) {
cmd = 'npm';
} else if (cmd === 'npm' && !hasPackageLock && hasYarnLock) {
cmd = 'yarn';
}
}
return cmd;
}
async function installRecursively(folder, depth = 0) {
if (dry || (log && log !== 'errors')) {
console.log('[RI] | Check Directory --> ', folder);
}
let pkg;
if (folder !== rootDir || !ignoreRoot) {
try {
// Check if package.json exists; if it doesn't, this will error and move on
pkg = JSON.parse(await fs.readFile(path.join(folder, 'package.json')));
// get the command that we should use. if lock checking is enabled it will
// also determine what installer to use based on the available lock files
const cmd = await getInstallCommand(folder);
const relativeDir = `${path.basename(rootDir)} -> ./${path.relative(
rootDir,
folder
)}`;
if (dry || (log && log !== 'errors')) {
console.log(
`[RI] | Performing (${cmd} install) at path "${relativeDir}"`
);
}
if (!dry) {
install(cmd, folder, relativeDir);
}
} catch {
// do nothing when an error is caught, as it simply indicates package.json likely doesn't
// exist.
}
}
if (
depth >= maxDepth ||
(pkg && useYarn && yarnWorkspaces && pkg.workspaces)
) {
// if we have reached maxDepth or if our package.json in the current directory
// contains yarn workspaces then we use yarn for installing then this is the last
// directory we will attempt to install.
return;
}
const files = await fs.readdir(folder, { withFileTypes: true });
return Promise.all(
files.map(file => {
const filePath = path.join(folder, file.name);
return shouldSkipFile(filePath, file)
? undefined
: installRecursively(filePath, depth + 1);
})
);
}
await installRecursively(rootDir);
await Promise.all(installPromises);
}
async function startRecursiveInstall(directories, _config) {
const config = getConfig(_config);
const promise = Array.isArray(directories)
? Promise.all(directories.map(rootDir => recurseDirectory(rootDir, config)))
: recurseDirectory(directories, config);
await promise;
}
if (AUTO_RUN) {
startRecursiveInstall(process.cwd());
}
module.exports = startRecursiveInstall;
And with it being used:
const installRecursively = require('./recursive-install');
installRecursively(process.cwd(), { dry: true })
find . -maxdepth 1 -type d \( ! -name . \) -exec bash -c "cd '{}' && npm install" \;
[For macOS, Linux users]:
I created a bash file to install all dependencies in the project and nested folder.
find . -name node_modules -prune -o -name package.json -execdir npm install \;
Explanation: starting from the root directory, exclude node_modules folders (even inside nested folders), find every directory that has a package.json file, then run the npm install command there.
In case you only want to search in specific folders (e.g. the abc123 and def456 folders), run it like this:
find ./abc123/* ./def456/* -name node_modules -prune -o -name package.json -execdir npm install \;
To run npm install on every subdirectory you can do something like:
"scripts": {
...
"install:all": "for D in */; do npm install --cwd \"${D}\"; done"
}
where
install:all is just the name of the script, you can name it whatever you please
D is the name of the directory at the current iteration
*/ specifies where you want to look for subdirectories. directory/*/ will list all directories inside directory/ and directory/*/*/ will list all directories two levels in.
npm install --prefix installs all dependencies in the given folder
You could also run several commands, for example:
for D in */; do echo \"Installing stuff on ${D}\" && npm install --prefix \"${D}\"; done
will print "Installing stuff on your_subfolder/" on every iteration.
This works for yarn too (yarn uses --cwd instead of --prefix).
Any language that can get a list of directories and run shell commands can do this for you.
I know it isn't the answer OP was going for exactly, but it's one that will always work. You need to create an array of subdirectory names, then loop over them and run npm i, or whatever command you're needing to run.
For reference, I tried npm i **/, which just installed the modules from all the subdirectories in the parent. It's unintuitive as hell, but needless to say it's not the solution you need.
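For instance, a minimal Node sketch of that loop (skipping node_modules and hidden folders, and only descending one level) might look like:

// Sketch: run `npm i` in every direct subdirectory that contains a package.json
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');

fs.readdirSync('.', { withFileTypes: true })
    .filter(d => d.isDirectory() && d.name !== 'node_modules' && !d.name.startsWith('.'))
    .map(d => d.name)
    .filter(dir => fs.existsSync(path.join(dir, 'package.json')))
    .forEach(dir => execSync('npm i', { cwd: dir, stdio: 'inherit' }));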
I'm having trouble getting this to work, and even finding solutions through Google on how to make it work.
Bower's site shows they have a programmatic API, and it looks like I should be able to run it in Node; of course I can, however it's not obeying my .bowerrc file and is not installing the packages into the dev folder created by Yeoman.
Does this have something to do with the way Yeoman works? Are the files and directories not quite available until after done() has been called?
Here is my index.js
'use strict';
var util = require('util');
var path = require('path');
var yeoman = require('yeoman-generator');
var chalk = require('chalk');
var bower = require('bower');
var FoprojectGenerator = yeoman.generators.Base.extend({
sayHello: function(){
console.log(this.yeoman);
},
scaffoldFolders: function(){
this.mkdir("working");
this.mkdir("working/assets");
this.mkdir("working/assets/sass");
this.mkdir("working/assets/coffee");
this.mkdir('dev');
},
copyMainFiles: function(){
this.copy("_index.html", "working/index.html");
this.copy("_gruntfile.js", "Gruntfile.js");
this.copy("_package.json", "package.json");
this.copy("_bower.json", "bower.json");
this.copy("_.bowerrc", ".bowerrc");
this.copy("assets/sass/_site.sass", "working/assets/sass/site.sass");
this.copy("assets/sass/_mixins.sass", "working/assets/sass/_mixins.sass");
this.copy("assets/sass/_normalize.sass", "working/assets/sass/_normalize.sass");
this.copy("assets/coffee/_scripts.coffee", "working/assets/coffee/scripts.coffee");
},
installDependencies: function(){
var done = this.async();
console.log("\nInstalling Node Dependencies\n");
this.npmInstall("", function(){
console.log("\nInstalling Bower Packages\n");
bower.commands
.install()
.on('end', function(){
done();
});
});
}
});
module.exports = FoprojectGenerator;
Like I said, it runs great, but it installs the packages next to the bower.json as opposed to in the dev folder I've defined in the .bowerrc file, like so:
{
"directory": "dev/bower_components"
}
When I run bower install after Yeoman is done, it installs the bower_components folder in the dev folder like it should.
Any guidance would be greatly appreciated!
Maybe the Yeoman generator you use forces dependencies to be installed in a certain specific path.
Open a terminal and go to the root directory of your app.
Type ls -la.
If you see .yo-rc.json, type cat .yo-rc.json.
Does it show any particular config for the path, like below?
{
  "generator-backbone-laser": {
    "appPath": "app"
  }
}
If so, delete .yo-rc.json.
The creator of a Yeoman generator sometimes sets the app path in that file.