The best way to run npm install for nested folders? - node.js

What is the most correct way to install npm packages in nested sub folders?
my-app
  /my-sub-module
    package.json
  package.json
What is the best way to have packages in /my-sub-module be installed automatically when npm install is run in my-app?

I prefer using postinstall, if you know the names of the nested subdirectories. In package.json:
"scripts": {
"postinstall": "cd nested_dir && npm install",
...
}

Per @Scott's answer, the install|postinstall script is the simplest way, as long as the sub-directory names are known. This is how I run it for multiple subdirectories. For example, pretend we have api/, web/ and shared/ sub-projects in a monorepo root:
// In monorepo root package.json
{
...
"scripts": {
"postinstall": "(cd api && npm install); (cd web && npm install); (cd shared && npm install)"
},
}
On Windows, replace the ; between the parentheses with &&.
// In monorepo root package.json
{
...
"scripts": {
"postinstall": "(cd api && npm install) && (cd web && npm install) && (cd shared && npm install)"
},
}

Use Case 1: If you want to be able to run npm commands from within each subdirectory (where each package.json is), you will need to use postinstall.
As I often use npm-run-all anyway, I use it to keep it nice and short (the part in the postinstall):
{
"install:demo": "cd projects/demo && npm install",
"install:design": "cd projects/design && npm install",
"install:utils": "cd projects/utils && npm install",
"postinstall": "run-p install:*"
}
This has the added benefit that I can install all at once, or individually. If you don't need this or don't want npm-run-all as a dependency, check out demisx's answer (using subshells in postinstall).
Use Case 2: If you will be running all npm commands from the root directory (and, for example, won't be using npm scripts in subdirectories), you could simply install each subdirectory like you would any dependency:
npm install path/to/any/directory/with/a/package-json
In the latter case, don't be surprised that you won't find any node_modules or package-lock.json files in the sub-directories: all packages will be installed in the root node_modules, which is why you won't be able to run your npm commands (those that require dependencies) from any of your subdirectories.
If you're not sure, use case 1 always works.
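As a small illustration of Use Case 2 (using the my-sub-module layout from the question): after npm install ./my-sub-module, npm 5+ records the folder as a file: dependency in the root package.json, roughly like this:
{
  "dependencies": {
    "my-sub-module": "file:my-sub-module"
  }
}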

If you want to run a single command to install npm packages in nested subfolders, you can run a script via npm from the main package.json in your root directory. The script will visit every subdirectory and run npm install.
Below is a .js script that will achieve the desired result:
var fs = require('fs');
var resolve = require('path').resolve;
var join = require('path').join;
var cp = require('child_process');
var os = require('os');

// get library path
var lib = resolve(__dirname, '../lib/');

fs.readdirSync(lib).forEach(function (mod) {
  var modPath = join(lib, mod);

  // ensure path has package.json
  if (!fs.existsSync(join(modPath, 'package.json'))) {
    return;
  }

  // npm binary based on OS
  var npmCmd = os.platform().startsWith('win') ? 'npm.cmd' : 'npm';

  // install folder
  cp.spawn(npmCmd, ['i'], {
    env: process.env,
    cwd: modPath,
    stdio: 'inherit'
  });
})
Note that this is an example taken from a StrongLoop article that specifically addresses a modular node.js project structure (including nested components and package.json files).
As suggested, you could also achieve the same thing with a bash script.
EDIT: Made the code work in Windows

Just for reference in case people come across this question. You can now:
Add a package.json to a subfolder
Install this subfolder as a reference link in the main package.json:
npm install --save path/to/my/subfolder

The accepted answer works, but you can use --prefix to run npm commands in a selected location.
"postinstall": "npm --prefix ./nested_dir install"
And --prefix works for any npm command, not just install.
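The same idea scales to several nested directories; a sketch (the folder names nested_dir and other_dir are placeholders):
"scripts": {
  "postinstall": "npm --prefix ./nested_dir install && npm --prefix ./other_dir install"
}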
You can also view the current prefix with
npm prefix
And set your global install (-g) folder with
npm config set prefix "folder_path"
Maybe TMI, but you get the idea...

My solution is very similar.
Pure Node.js
The following script recursively examines all subfolders (as long as they have a package.json) and runs npm install in each of them.
One can add exceptions: folders that are allowed not to have a package.json of their own. In the example below, one such folder is "packages".
One can run it as a "preinstall" script.
const path = require('path')
const fs = require('fs')
const child_process = require('child_process')

const root = process.cwd()

npm_install_recursive(root)

// Since this script is intended to be run as a "preinstall" command,
// it will do `npm install` automatically inside the root folder in the end.
console.log('===================================================================')
console.log(`Performing "npm install" inside root folder`)
console.log('===================================================================')

// Recurses into a folder
function npm_install_recursive(folder)
{
    const has_package_json = fs.existsSync(path.join(folder, 'package.json'))

    // Abort if there's no `package.json` in this folder and it's not a "packages" folder
    if (!has_package_json && path.basename(folder) !== 'packages')
    {
        return
    }

    // If there is `package.json` in this folder then perform `npm install`.
    //
    // Since this script is intended to be run as a "preinstall" command,
    // skip the root folder, because it will be `npm install`ed in the end.
    // Hence the `folder !== root` condition.
    //
    if (has_package_json && folder !== root)
    {
        console.log('===================================================================')
        console.log(`Performing "npm install" inside ${folder === root ? 'root folder' : './' + path.relative(root, folder)}`)
        console.log('===================================================================')

        npm_install(folder)
    }

    // Recurse into subfolders
    for (let subfolder of subfolders(folder))
    {
        npm_install_recursive(subfolder)
    }
}

// Performs `npm install`
function npm_install(where)
{
    child_process.execSync('npm install', { cwd: where, env: process.env, stdio: 'inherit' })
}

// Lists subfolders in a folder
function subfolders(folder)
{
    return fs.readdirSync(folder)
        .filter(subfolder => fs.statSync(path.join(folder, subfolder)).isDirectory())
        .filter(subfolder => subfolder !== 'node_modules' && subfolder[0] !== '.')
        .map(subfolder => path.join(folder, subfolder))
}
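For example, if the script above is saved as install-recursive.js (the file name is arbitrary), the root package.json can wire it up as the "preinstall" step like this:
{
  "scripts": {
    "preinstall": "node install-recursive.js"
  }
}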

If you have the find utility on your system, you could try running the following command in your application root directory:
find . ! -path "*/node_modules/*" -name "package.json" -execdir npm install \;
Basically, find all package.json files and run npm install in that directory, skipping all node_modules directories.

EDIT As mentioned by fgblomqvist in comments, npm now supports workspaces too.
Some of the answers are quite old. I think nowadays we have some new options available to set up monorepos.
I would suggest using yarn workspaces:
Workspaces are a new way to set up your package architecture that’s available by default starting from Yarn 1.0. It allows you to setup multiple packages in such a way that you only need to run yarn install once to install all of them in a single pass.
If you prefer or have to stay with npm, I suggest taking a look at lerna:
Lerna is a tool that optimizes the workflow around managing multi-package repositories with git and npm.
lerna works perfectly with yarn workspaces too - article. I've just finished setting up a monorepo project - example.
And here is an example of a multi-package project configured to use npm + lerna - MDC Web: they run lerna bootstrap using package.json's postinstall.
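Since the edit above mentions that npm itself now supports workspaces (npm 7+), a minimal root package.json using them might look like this (a sketch; the folder names are placeholders). A single npm install at the root then installs and links all of the listed packages:
{
  "name": "my-app",
  "private": true,
  "workspaces": [
    "api",
    "web",
    "shared"
  ]
}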

Adding Windows support to snozza's answer, as well as skipping the node_modules folder if present.
var fs = require('fs')
var resolve = require('path').resolve
var join = require('path').join
var cp = require('child_process')

// get library path
var lib = resolve(__dirname, '../lib/')

fs.readdirSync(lib)
  .forEach(function (mod) {
    var modPath = join(lib, mod)

    // skip node_modules and any folder without a package.json
    if (mod === 'node_modules' || !fs.existsSync(join(modPath, 'package.json'))) return

    // Determine OS and set command accordingly
    const cmd = /^win/.test(process.platform) ? 'npm.cmd' : 'npm';

    // install folder
    cp.spawn(cmd, ['i'], { env: process.env, cwd: modPath, stdio: 'inherit' })
  })

Inspired by the scripts provided here, I built a configurable example which:
can be set up to use yarn or npm
can be set up to determine the command to use based on lock files, so that if you set it to use yarn but a directory only has a package-lock.json, it will use npm for that directory (defaults to true)
has configurable logging
runs installations in parallel using cp.spawn
can do dry runs to let you see what it would do first
can be run as a function or auto-run using env vars
when run as a function, optionally accepts an array of directories to check
returns a promise that resolves when completed
allows setting a max depth to recurse into if needed
knows to stop recursing if it finds a folder with yarn workspaces (configurable)
allows skipping directories using a comma-separated env var, or by passing the config an array of strings to match against, or a function which receives the file path and the fs.Dirent object and expects a boolean result.
const path = require('path');
const { promises: fs } = require('fs');
const cp = require('child_process');
// if you want to have it automatically run based upon
// process.cwd()
const AUTO_RUN = Boolean(process.env.RI_AUTO_RUN);
/**
* Creates a config object from environment variables which can then be
* overridden if executing via its exported function (config as second arg)
*/
const getConfig = (config = {}) => ({
// we want to use yarn by default but RI_USE_YARN=false will
// use npm instead
useYarn: process.env.RI_USE_YARN !== 'false',
// should we handle yarn workspaces? if this is true (default)
// then we will stop recursing if a package.json has the "workspaces"
// property and we will allow `yarn` to do its thing.
yarnWorkspaces: process.env.RI_YARN_WORKSPACES !== 'false',
// if truthy, will run extra checks to see if there is a package-lock.json
// or yarn.lock file in a given directory and use that installer if so.
detectLockFiles: process.env.RI_DETECT_LOCK_FILES !== 'false',
// what kind of logging should be done on the spawned processes?
// if this exists and it is not errors it will log everything
// otherwise it will only log stderr and spawn errors
log: process.env.RI_LOG || 'errors',
// max depth to recurse?
maxDepth: process.env.RI_MAX_DEPTH || Infinity,
// do not install at the root directory?
ignoreRoot: Boolean(process.env.RI_IGNORE_ROOT),
// an array (or comma separated string for env var) of directories
// to skip while recursing. if array, can pass functions which
// return a boolean after receiving the dir path and fs.Dirent args
// @see https://nodejs.org/api/fs.html#fs_class_fs_dirent
skipDirectories: process.env.RI_SKIP_DIRS
? process.env.RI_SKIP_DIRS.split(',').map(str => str.trim())
: undefined,
// just run through and log the actions that would be taken?
dry: Boolean(process.env.RI_DRY_RUN),
...config
});
function handleSpawnedProcess(dir, log, proc) {
return new Promise((resolve, reject) => {
proc.on('error', error => {
console.log(`
----------------
[RI] | [ERROR] | Failed to Spawn Process
- Path: ${dir}
- Reason: ${error.message}
----------------
`);
reject(error);
});
if (log) {
proc.stderr.on('data', data => {
console.error(`[RI] | [${dir}] | ${data}`);
});
}
if (log && log !== 'errors') {
proc.stdout.on('data', data => {
console.log(`[RI] | [${dir}] | ${data}`);
});
}
proc.on('close', code => {
if (log && log !== 'errors') {
console.log(`
----------------
[RI] | [COMPLETE] | Spawned Process Closed
- Path: ${dir}
- Code: ${code}
----------------
`);
}
if (code === 0) {
resolve();
} else {
reject(
new Error(
`[RI] | [ERROR] | [${dir}] | failed to install with exit code ${code}`
)
);
}
});
});
}
async function recurseDirectory(rootDir, config) {
const {
useYarn,
yarnWorkspaces,
detectLockFiles,
log,
maxDepth,
ignoreRoot,
skipDirectories,
dry
} = config;
const installPromises = [];
function install(cmd, folder, relativeDir) {
const proc = cp.spawn(cmd, ['install'], {
cwd: folder,
env: process.env
});
installPromises.push(handleSpawnedProcess(relativeDir, log, proc));
}
function shouldSkipFile(filePath, file) {
if (!file.isDirectory() || file.name === 'node_modules') {
return true;
}
if (!skipDirectories) {
return false;
}
return skipDirectories.some(check =>
typeof check === 'function' ? check(filePath, file) : check === file.name
);
}
async function getInstallCommand(folder) {
let cmd = useYarn ? 'yarn' : 'npm';
if (detectLockFiles) {
const [hasYarnLock, hasPackageLock] = await Promise.all([
fs
.readFile(path.join(folder, 'yarn.lock'))
.then(() => true)
.catch(() => false),
fs
.readFile(path.join(folder, 'package-lock.json'))
.then(() => true)
.catch(() => false)
]);
if (cmd === 'yarn' && !hasYarnLock && hasPackageLock) {
cmd = 'npm';
} else if (cmd === 'npm' && !hasPackageLock && hasYarnLock) {
cmd = 'yarn';
}
}
return cmd;
}
async function installRecursively(folder, depth = 0) {
if (dry || (log && log !== 'errors')) {
console.log('[RI] | Check Directory --> ', folder);
}
let pkg;
if (folder !== rootDir || !ignoreRoot) {
try {
// Check if package.json exists; if it doesn't, this will throw and we move on
pkg = JSON.parse(await fs.readFile(path.join(folder, 'package.json')));
// get the command that we should use. if lock checking is enabled it will
// also determine what installer to use based on the available lock files
const cmd = await getInstallCommand(folder);
const relativeDir = `${path.basename(rootDir)} -> ./${path.relative(
rootDir,
folder
)}`;
if (dry || (log && log !== 'errors')) {
console.log(
`[RI] | Performing (${cmd} install) at path "${relativeDir}"`
);
}
if (!dry) {
install(cmd, folder, relativeDir);
}
} catch {
// do nothing when an error is caught, as it simply indicates that package.json
// likely doesn't exist.
}
}
if (
depth >= maxDepth ||
(pkg && useYarn && yarnWorkspaces && pkg.workspaces)
) {
// if we have reached maxDepth, or if the package.json in the current directory
// defines yarn workspaces (and we are using yarn), then this is the last
// directory we will attempt to install; yarn handles the workspace packages itself.
return;
}
const files = await fs.readdir(folder, { withFileTypes: true });
return Promise.all(
files.map(file => {
const filePath = path.join(folder, file.name);
return shouldSkipFile(filePath, file)
? undefined
: installRecursively(filePath, depth + 1);
})
);
}
await installRecursively(rootDir);
await Promise.all(installPromises);
}
async function startRecursiveInstall(directories, _config) {
const config = getConfig(_config);
const promise = Array.isArray(directories)
? Promise.all(directories.map(rootDir => recurseDirectory(rootDir, config)))
: recurseDirectory(directories, config);
await promise;
}
if (AUTO_RUN) {
startRecursiveInstall(process.cwd());
}
module.exports = startRecursiveInstall;
And with it being used:
const installRecursively = require('./recursive-install');
installRecursively(process.cwd(), { dry: true })
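Alternatively, since the script auto-runs when RI_AUTO_RUN is set (see the bottom of the script), it can be wired straight into an npm script. A sketch, assuming the file is saved as recursive-install.js and a POSIX shell (the inline env var syntax does not work in cmd.exe):
"scripts": {
  "preinstall": "RI_AUTO_RUN=true node ./recursive-install.js"
}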

find . -maxdepth 1 -type d \( ! -name . \) -exec bash -c "cd '{}' && npm install" \;

[For macOS, Linux users]:
I created a bash file to install all dependencies in the project and its nested folders.
find . -name node_modules -prune -o -name package.json -execdir npm install \;
Explanation: starting from the root directory, prune node_modules folders (even nested ones), find every directory that has a package.json file, then run npm install in that directory.
In case you just want to find on specified folders (eg: abc123, def456 folder), run as below:
find ./abc123/* ./def456/* -name node_modules -prune -o -name package.json -execdir npm install \;

To run npm install on every subdirectory you can do something like:
"scripts": {
...
"install:all": "for D in */; do npm install --cwd \"${D}\"; done"
}
where
install:all is just the name of the script; you can name it whatever you please
D is the name of the directory at the current iteration
*/ specifies where you want to look for subdirectories. directory/*/ will list all directories inside directory/ and directory/*/*/ will list all directories two levels in.
--cwd installs all dependencies in the given folder (note that --cwd is a yarn flag; the npm equivalent is --prefix)
You could also run several commands, for example:
for D in */; do echo \"Installing stuff on ${D}\" && npm install --cwd \"${D}\"; done
will print "Installing stuff on your_subfolder/" on every iteration.
This works for yarn too
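With yarn, for example, the script might look like this (a sketch; yarn's --cwd flag runs the command in the given directory):
"scripts": {
  "install:all": "for D in */; do yarn --cwd \"${D}\" install; done"
}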

Any language that can get a list of directories and run shell commands can do this for you.
I know it isn't the answer OP was going for exactly, but it's one that will always work. You need to create an array of subdirectory names, then loop over them and run npm i, or whatever command you're needing to run.
For reference, I tried npm i **/, which just installed the modules from all the subdirectories in the parent. It's unintuitive as hell, but needless to say it's not the solution you need.
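As a small concrete sketch of that idea in Node (the subdirectory names are placeholders; any list of folders containing a package.json works):
// install-all.js: loop over known subdirectories and run npm install in each
const { execSync } = require('child_process');

const dirs = ['api', 'web', 'shared']; // placeholder subdirectory names

for (const dir of dirs) {
  console.log(`Installing dependencies in ${dir}`);
  execSync('npm install', { cwd: dir, stdio: 'inherit' });
}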

Related

npm run build returns Module not found: Error: Can't resolve in my docker container in my NuxtJS app

I've been on this issue for hours now. I kept receiving the error below when I run npm run build
ERROR in ./store/chatroom.js
Module not found: Error: Can't resolve '@/services/ChatRoomService.js' in '/usr/src/app/store'
 @ ./store/chatroom.js 1:0-60 9:11-26
 @ ./.nuxt/store.js
 @ ./.nuxt/index.js
 @ ./.nuxt/client.js
 @ multi ./.nuxt/client.js
What's weird is that it works perfectly on my local machine. The error above occurs in my Docker build, and when I run my container with my codebase in it. Even weirder, when I run my container with a bind mount to my local code and try npm run build, it works properly.
At first I thought that maybe some files from my local machine were missing, but I tried copying every file from my local machine into my container via docker cp . and it still does not work.
Dockerfile
FROM node:8.12.0
WORKDIR /usr/src/app
EXPOSE 3000
COPY package.json package.json
RUN npm install
# To include everything
COPY . .
RUN npm run build
ENTRYPOINT ["/usr/src/app/entrypoint.sh"]
chatroom.js
import ChatRoomService from "@/services/ChatRoomService.js";

export const state = () => ({});
export const mutations = {};
export const actions = {
  getText({ commit }, data) {
    return ChatRoomService.queryText(data).then(response => {
      if (response.code === 1) {
        commit("bbs/SET_TOP_ARR", JSON.parse(response.data.content), {
          root: true
        });
      }
    });
  }
};
chatRoomService.js
import { mainApiClient, requestSetup } from "@/assets/js/axios.js";

const apiModule = "chatroom";
const resources = {
  chatroomGetChatRoomText: "text/queryText"
};

export default {
  queryText(body) {
    const resource = resources.chatroomGetChatRoomText;
    const [api, req] = requestSetup(resource, body, apiModule);
    return mainApiClient.post(api, req);
  }
};
UPDATE
I have solved it. It was how I imported the file: I imported chatRoomService.js where it should have been ChatRoomService.js. It works just fine on my local machine because macOS's default filesystem is case-insensitive, but the Linux filesystem inside the container is case-sensitive.

test electron app with spectron and travis

I'm trying to test my electron app https://github.com/rafaelleru/torrent_player with spectron. I tried to set up the example test from https://github.com/electron/spectron and it passes on my local PC, but on Travis I don't know how to set up the bin files for testing, or how to tell Travis to generate them.
What is the right way to do this?
It's hard to answer your question if you don't specify exactly what happens. But below is how I solved it, I hope it can be helpful.
This is how I got Spectron running my Electron application in Travis (Linux only):
.travis.yml
os:
  - linux
language: node_js
node_js:
  - "7.7"
before_script:
  - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then export DISPLAY=:99.0; fi
  - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sh -e /etc/init.d/xvfb start; fi
  - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sleep 3; fi
script:
  - node --version
  - npm --version
  - npm install
  - npm run e2e
cache:
  directories:
    - node_modules
notifications:
  email:
    on_success: never
    on_failure: change
utils.js
const electron = require('electron');
// Application comes from spectron; this require was missing from the original snippet
const Application = require('spectron').Application;

const beforeEach = function (env = {}) {
  this.app = new Application({
    path: electron,
    args: ['.'],
  });
  return this.app.start();
};

const afterEach = function () {
  if (this.app && this.app.isRunning()) {
    return this.app.stop();
  }
  return undefined;
};

module.exports = { beforeEach, afterEach };
.e2e.js-files
describe('test case', function () {
beforeEach(testUtils.beforeEach);
afterEach(testUtils.afterEach);
it('should run', function () {
});
});
How to store screenshots of Spectron test failures
If you got your tests running, but they are for some reason failing, then it can be helpful to get a screenshot from the Electron application.
utils.js
const fs = require('fs');
const saveErrorShot = function (e) {
const filename = `errorShot-${this.test.parent.title}-${this.test.title}-${new Date().toISOString()}.png`
.replace(/\s/g, '_')
.replace(/:/g, '');
this.app.browserWindow.capturePage().then(imageBuffer => {
fs.writeFile(filename, imageBuffer, error => {
if (error) throw error;
console.info(`Screenshot saved: ${process.cwd()}/${filename}`);
});
});
throw e;
};
.e2e.js-files
describe('test case', function () {
  it('should store a screenshot', function () {
    return this.app.client
      .then(() => this.app.client.getText('.non-existing-element'))
      .catch(testUtils.saveErrorShot.bind(this));
  });
});
To make Travis send your artifacts to an Amazon S3 bucket
Add your AWS S3 credentials to your Travis env variables, read here for more information.
Add the following to .travis.yml
after_failure:
- curl -sL https://raw.githubusercontent.com/travis-ci/artifacts/master/install | bash
- artifacts upload $(git ls-files -o | grep -Fv -e node_modules)
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then artifacts upload $(ls /home/travis/.npm/_logs/); fi
What it does:
Downloads and installs travis-artifacts
Uploads all untracked files in your repository, excluding all node modules. You'll probably have some other files you want to exclude; just append -e unnecessary_folder -e unnecessary_file.
Uploads the npm logs from /home/travis/.npm/_logs. If you're running macOS the files will appear in another folder.
The answer is in this issue:
https://github.com/electron/spectron/issues/170

npm install from the parent directory

I have a directory structure like this: /a/b/c
Directory c contains package.json and should contain node_modules.
How can I execute npm install from inside directory a?
I tried this way: npm --prefix b/c install b/c but this way, all the symlinks are created directly inside c instead of the default node_modules/.bin.
Is there any way to achieve that?
node: 6.2.2
npm: 3.10.2
Using an npm preinstall hook in a package.json within your a directory is likely the best choice in this situation.
"scripts": {
  "preinstall": "cd b/c && npm install"
}
This way running npm install in directory a will also do the c directory install and provide a seamless dev experience.
A bit overkill, but maybe useful...
With the help of recursion you can find node_modules folders.
You could run this file in a parent directory to find node_modules in child directories and pass along npm arguments.
Note: tested on Windows
var child_process = require('child_process');
var fs = require('fs');
var path = require('path');

var safe = 0;
let args = process.argv.splice(2).toString().replace(/,/g, ' ');

function recurse(_path) {
  safe++;
  if (safe > 5000) {
    console.log('directory may be too large');
    return;
  }
  if (/node_modules$/.test(_path)) {
    let cwd = path.resolve(__dirname, _path);
    console.log('found node_modules at ' + cwd);
    child_process.exec(`start cmd.exe /k npm ${args}`, { cwd });
    return;
  }
  let directoryList = fs.readdirSync(_path);
  directoryList.forEach(function (nextDir) {
    if (fs.statSync(_path + '/' + nextDir).isFile()) {
      return;
    }
    if (/^\./.test(nextDir)) { // folder beginning with .
      return;
    }
    recurse(_path + '/' + nextDir);
  });
}

recurse('./');

how to remove everything with rimraf except few exceptions

I'm trying to remove all dot files, except .git, and all *.js except one exception.js, but I'm failing.
install
git clone git@github.com:iamstarkov/rimraf-test.git
cd rimraf-test
test
npm test # it fails
can anybody help me?
var globby = require('globby');
var rimraf = require('rimraf');

globby(['*', '!authors.js', '!.git', '!dump'])
  .then(function then(paths) {
    paths.map(function map(item) {
      rimraf.sync(item);
    });
  });

Ionic / bower / cordova - ignore files for build

My project structure is the following:
MyApp
- hooks
- platforms
  - android
  - ios
- www
  - js / css / templates..
  - lib (including all bower components)
Right now, the www/lib directory is taking up 21.8 MB. (I have a large set of bower components added to my project.)
When building each project, the entire www folder is copied to the platforms/android (for instance) folder for the build, including of course www/lib.
This leads to a very big build, as many of the files included in bower components are useless for production.
Manually managing all bower dependencies is clearly not an option. So how do you manage to clean your project platform directory for the build?
I was thinking about creating a hook for that, but before writing lines of code in a language that I do not know (Node.js), I was hoping for your feedback and advice.
According to Cordova workflow you can add a hook script that removes unnecessary files.
A detailed example of a cleanup script can be found here: https://www.thepolyglotdeveloper.com/2015/01/hooks-apache-cordova-mobile-applications/
But to give a quick step by step summary:
Add a script to the after_prepare hook folder (/hooks/after_prepare), e.g. 01_junk_cleanup.js (the 01 prefix makes it run first; name the rest whatever you want), and in that file specify the files and folders you want to delete. For example, here is how you can delete a test folder and related files; just change it to your lib directory and the files there. Note that this example is a bit different from the example in the link I gave earlier, so you might want to take a look there as well.
01_junk_cleanup.js:
#!/usr/bin/env node
var fs = require('fs');
var path = require('path');
var foldersToProcess = [
"js",
"css"
];
var foldersToDelete = [
"test"
];
var filesToDelete = [
"karmaOnBrowser.conf.js",
"karmaOnEmulators.conf.js",
"SpecRunner.html"
];
var iosPlatformsDir = "platforms/ios/www/";
var androidPlatformsDir = "platforms/android/assets/www/";
filesToDelete.forEach(function(file) {
var filePathIOS = iosPlatformsDir + file;
var filePathAndroid = androidPlatformsDir + file;
if(fs.existsSync(filePathIOS)){
fs.unlinkSync(filePathIOS);
};
if(fs.existsSync(filePathAndroid)){
fs.unlinkSync(filePathAndroid);
};
});
foldersToProcess.forEach(function(folder) {
processFiles(iosPlatformsDir + folder);
processFiles(androidPlatformsDir + folder);
});
foldersToDelete.forEach(function(folder) {
deleteFolderRecursive(iosPlatformsDir + folder);
deleteFolderRecursive(androidPlatformsDir + folder);
});
function deleteFolderRecursive(path){
if( fs.existsSync(path) ) {
fs.readdirSync(path).forEach(function(file,index){
var curPath = path + "/" + file;
if(fs.lstatSync(curPath).isDirectory()) { // recurse
deleteFolderRecursive(curPath);
} else { // delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
}
function processFiles(dir) {
fs.readdir(dir, function(err, list) {
if(err) {
console.log('processFiles err: ' + err);
return;
}
list.forEach(function(file) {
file = dir + '/' + file;
fs.stat(file, function(err, stat) {
if(!stat.isDirectory()) {
switch(path.basename(file)) {
case ".DS_Store":
fs.unlink(file, function(error) {
console.log("Removed file " + file);
});
break;
case "Thumbs.db":
fs.unlink(file, function(error) {
console.log("Removed file " + file);
});
break;
default:
console.log("Skipping file " + file);
break;
}
}
});
});
});
}
Aside from the above (a bit more obvious, but worth mentioning anyhow): after running into the www/lib bloat as well, I always try to keep that folder lean and add only the libraries required for deployment. The other dev dependencies, such as jasmine, I keep in the node_modules or bower_components folders, since these days I only install through npm and bower.
Hope this helps,
Good luck
I think the best approach would be to do this:
Move the bower_components folder and your index.html file to the project root, outside the /www folder
Install gulp and gulp-usemin
Wrap all of the .js files and .css files from bower components in usemin <build:js> and <build:css> sections
Configure a task in your gulpfile to concatenate all those files into a lib.js and a lib.css file (a sketch follows after this list). Make sure that those two files, as well as the rewritten index.html, are output to the /www folder
Execute the gulp task before your next build, and each time you add a new bower component.
This will keep your /www folder tidy and only containing the files you need in your cordova build.
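A minimal gulpfile sketch for step 4, assuming gulp and gulp-usemin are installed and that index.html sits at the project root with <!-- build:js lib.js --> and <!-- build:css lib.css --> comment blocks wrapped around the bower scripts and styles:
// gulpfile.js (sketch)
var gulp = require('gulp');
var usemin = require('gulp-usemin');

gulp.task('usemin', function () {
  return gulp.src('./index.html')
    // concatenates every build:js / build:css block into lib.js and lib.css
    .pipe(usemin())
    // writes the rewritten index.html plus lib.js / lib.css into www/
    .pipe(gulp.dest('./www'));
});
Run gulp usemin before each build (and whenever a new bower component is added) so that /www only ever contains the concatenated files.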
With Bower you need to use npm preen to remove unnecessary files
See my example using Gulp with Ionic Framework: https://github.com/jdnichollsc/Ionic-Starter-Template
Basically you can set your bower.json file to indicate the path which files you need, for example:
"preen": {
//... More libraries
"ionic-datepicker": [
"dist/*.js" //You only need these files
//Other files and folders will be deleted to reduce the size of your app
],
"ion-floating-menu": [
"dist/*" //Keep all the files (.html, .css, .js, etc) of the directory.
]
}
Regards, Nicholls
This is an improvement over this answer. I've applied it to my own project.
Move the bower_components folder to the project root, outside the www folder.
Rename index.html to _index.html. We will later make sure that Gulp automatically generates index.html.
Install gulp and gulp-useref.
Edit your _index.html so that it looks something like this:
<!-- build:js dist/js/vendor.js -->
<script src="../bower_components/ionic/release/js/ionic.bundle.min.js"></script>
<script src="../bower_components/ngstorage/ngStorage.min.js"></script>
<script src="../bower_components/ngCordova/dist/ng-cordova.min.js"></script>
<!-- endbuild -->
Configure your gulp watch task to build new index.html file in the www folder with the concatenated files.
var gulp = require('gulp');
var useref = require('gulp-useref');
var rename = require('gulp-rename'); // also needs to be installed, for the rename step below

var entrypoint = './www/_index.html';

gulp.task('watch', function () {
  gulp.watch(entrypoint, ['concat-js-and-css']);
});

gulp.task('concat-js-and-css', function () {
  return gulp.src(entrypoint)
    .pipe(useref())
    .pipe(rename(function (path) {
      // rename _index.html to index.html
      if (path.basename == '_index' && path.extname == '.html') {
        path.basename = "index";
      }
    }))
    .pipe(gulp.dest('./www'));
});

gulp.task('build', ['concat-js-and-css']);
When that task runs, your index.html file will contain just this:
<script src="dist/js/vendor.js"></script>
Edit your ionic.project file so that it looks like the following. This will make sure that gulp watch is run before ionic serve.
{
  "watchPatterns": [
    "www/_index.html"
  ],
  "gulpStartupTasks": [
    "watch"
  ]
}
