I'm building a Node.js application as a CLI tool. I'm using yargs to define my commands, and I'm using the .config() method to load default configuration settings from a .config or config.json file, like so:
const findUp = require('find-up')
const fs = require('fs')
const yargs = require('yargs');
const configPath = findUp.sync(['.config', 'config.json']);
const config = configPath ? JSON.parse(fs.readFileSync(configPath)) : {};
yargs
  .command(require('./command/command-name'))
  .option( .... )
  .config(config)
  .argv
I would like to create a command that I can use to configure my .config file. Something like:
my-tool configure --defaults key1=<value> key2=<value>
Is there an npm package available to do this, or would I have to roll my own?
I rolled my own.
# configure.js
const findUp = require('find-up')
const fs = require('fs')
exports.command = 'configure'
exports.desc = 'Set or view your configuration'
exports.builder = {
  'defaults': {
    desc: 'set configuration values',
    type: 'array',
    group: 'Parameters:'
  }
}
exports.handler = function (argv) {
  const configPath = findUp.sync(['.config', 'config.json']);
  const config = configPath ? JSON.parse(fs.readFileSync(configPath)) : {};
  if (argv.defaults) {
    argv.defaults.forEach(arg => {
      // Split on the first '=' only, so values may themselves contain '='
      const [key, ...rest] = arg.split('=');
      const value = rest.join('=');
      if (value) {
        config[key] = value;
      } else {
        // A bare key (or key=) removes the entry
        delete config[key];
      }
    });
    // Write back to the existing config file, or create ./.config if none was found
    const file = configPath ? configPath : './.config';
    fs.writeFileSync(file, JSON.stringify(config, null, 2));
  } else {
    // No --defaults given: print the current configuration
    console.log(JSON.stringify(config, null, 2));
  }
}
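For completeness, here's roughly how this behaves from the command line (my-tool and the key names are just the placeholders from the question; the values are illustrative):
my-tool configure --defaults key1=foo key2=bar   # writes the values to the nearest .config (or creates ./.config)
my-tool configure --defaults key2=               # an empty value removes key2 from the file
my-tool configure                                # no --defaults: prints the current configuration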
I'm trying to create a shallow clone using simple-git, equivalent to this command: git clone --depth 1 https://github.com/steveukx/git-js.git. My code is as follows:
const git = require('simple-git')()
const repoURL = 'https://github.com/steveukx/git-js.git';
const localPath= './git-js';
const options = ['--depth', '1'];
const handlerFn = () => {
console.log('DONE')
};
git.clone(repoURL, localPath, options, handlerFn());
I've specified --depth 1 in options, but the code copies the entire repo history; it seems to completely ignore the options given. Am I doing this correctly? What could cause this behaviour?
After some digging, the issue turned out to be in git.clone(repoURL, localPath, options, handlerFn()); you have to pass a reference to the function instead of invoking it, like this: git.clone(repoURL, localPath, options, handlerFn);. Writing handlerFn() calls the function immediately and passes its return value (undefined) as the callback instead of the function itself.
The full implementation is below:
const git = require('simple-git')();
const fs = require('fs')
const url = require('url');
this.gitURL = 'https://github.com/torvalds/linux.git';
const localURL = url.parse(this.gitURL);
const localRepoName = (localURL.hostname + localURL.path)
.replace('com', '')
.replace('/', '')
.replace('/', '.')
.replace('.git', '')
this.localPath = `./${localRepoName}`;
this.options = ['--depth', '1'];
this.callback = () => {
console.log('DONE')
}
if (fs.existsSync(this.localPath)) {
// something
} else {
git.outputHandler((command, stdout, stderr) => {
stdout.pipe(process.stdout);
stderr.pipe(process.stderr)
stdout.on('data', (data) => {
// Print data
console.log(data.toString('utf8'))
})
})
.clone(this.gitURL, this.localPath, this.options, this.callback)
}
I have an 'easy-db.js' module with classes inside:
const fs = require('fs');
exports.db = function () {
this.data = {};
this.filename = 'log/db/data.json';
if (fs.existsSync(this.filename)) {
}
}
Why should I use the additional parentheses:
const db1 = new (require('./easy-db').db)();
console.log(db1); // { data: {}, filename: 'log/db/data.json' }
const db2 = new require('./easy-db').db();
console.log(db2); // undefined why???
Because new require('./easy-db') is evaluated first: the new operator consumes require('./easy-db') together with its argument list, so require itself is called as a constructor, and .db() is then executed as an ordinary method call on the returned module object. Whatever that call returns (undefined here) is what ends up in db2. The extra parentheses in the first form force new to apply to the db function instead.
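A minimal sketch of how the two forms are grouped (same module as above):
// Parentheses make `new` apply to the db constructor itself:
const db1 = new (require('./easy-db').db)(); // -> { data: {}, filename: 'log/db/data.json' }

// Without them, `new` binds to require('./easy-db') and its argument list;
// .db() is then a plain call whose return value (undefined) is assigned:
const db2 = (new require('./easy-db')).db(); // -> undefined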
In my package.json, I'm running a node module that can only handle one file at a time (I didn't write it). For example:
cleancss somefile.css -o somefile-min.css
I want to be able to do something like:
printfilelist -dir /public/css -files *.css | cleancss {filepath}.{fileext} -o {filename}-min.css
Is there any way to do this?
I recently encountered a similar requirement to run cleancss across multiple files. I eventually opted for a solution similar to the one discussed in the comments, i.e. using the clean-css API with a utility Node.js script.
npm-script
"scripts": {
"cleancss": "glob \"public/css/**/*.css\" | node .scripts/cleancss.js"
},
Note the initial use of cli-glob (added to package.json) for obtaining the paths before piping them to cleancss.js.
cleancss.js
The utility Node script was as follows (albeit rather rudimentary!):
#!/usr/bin/env node
var fs = require('fs');
var path = require('path');
var readline = require('readline');
var CleanCSS = require('clean-css');
var mkdirp = require('mkdirp');
var rl = readline.createInterface({
input: process.stdin,
output: null,
terminal: false
});
var options = {
// ... https://www.npmjs.com/package/clean-css#constructor-options
// ... https://www.npmjs.com/package/clean-css#formatting-options
};
function saveFile(outputPath, minified) {
fs.writeFile(outputPath, minified, function(err) {
if (err) {
return console.log(err);
}
});
}
function pathParts(srcPath) {
var ext = path.extname(srcPath),
name = path.basename(srcPath, ext),
dirPath = path.dirname(srcPath),
pathParts = dirPath.split(path.sep),
pathNoRoot;
pathParts.shift();
pathNoRoot = pathParts.join(path.sep);
return {
ext: ext,
name: name,
pathNoRoot: pathNoRoot
};
}
function minify(srcPath, outputPath) {
var output = new CleanCSS(options).minify([srcPath]);
saveFile(outputPath, output.styles);
}
function processPath(srcPath) {
var outDir = process.env.npm_package_config_css_outdir || path.dirname(srcPath),
p = pathParts(srcPath),
newName = p.name + '.min' + p.ext,
mkDirPath,
outputPath;
if (process.env.npm_package_config_css_outdir) {
mkDirPath = outDir + path.sep + p.pathNoRoot + path.sep;
mkdirp(mkDirPath, function() {
outputPath = mkDirPath + newName;
minify(srcPath, outputPath);
});
} else {
outputPath = outDir + path.sep + newName;
minify(srcPath, outputPath);
}
}
rl.on('line', function(srcPath) {
processPath(srcPath);
});
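With the script entry shown earlier in package.json, the whole pipeline is then run with:
npm run cleancss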
Additional info
By default the .min.css files are output to the same path as the .css source file.
However, if you need to change the output destination, you can utilize the config object in package.json. The following example will output all .min.css files to the ./dist/ path:
"config": {
"css_outdir": "./dist"
},
"scripts": {
"cleancss": "glob \"public/css/**/*.css\" | node .scripts/cleancss.js"
},
cleancss.js utilizes mkdirp to replicate/mirror the .css source paths/subfolders in the destination folder when using config.css_outdir.
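For example (tracing the path handling in cleancss.js above, with an illustrative source file): with css_outdir set to ./dist, a file matched at public/css/site/theme.css is written to ./dist/css/site/theme.min.css; pathParts() strips the leading public segment and the remaining subfolders are recreated under ./dist by mkdirp.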
Using Babel to Get ApolloClient to ES5 CommonJS Module Format
I'm trying to use Babel to get the apollo-client module to work as ES5 in a non-browser Node environment. I've gone through the steps below, which always give me the same result, and I'm trying to figure out whether that result is the right one for a Node environment. When I import the Babel-processed files into my project and call a method that should be exported, I get "cannot find module". For context, the project is a fusetools.com demo. Fusetools does not support ES2015 Promises, so the idea is that with the Babel es2015 preset it should work. I'm mostly chasing this down to learn something, but it would be great if I could get it to work. Any comments on an easier way to do this, now that I understand it better, would be greatly appreciated. The project where I Babeled the code can be found here. The fusetools project where I used the transformed code is here.
The error I get is:
LOG: Error: JavaScript error in MainView.ux line 9: Name: Fuse.Scripting.Error
Error message: require(): module not found: js/apollo-client/ApolloClient.js
File name: MainView.ux
Line number: 9
Source line: var ApolloClient = require('js/apollo-client/ApolloClient.js');
This is the code I'm trying to reach:
```
"use strict";
var networkInterface_1 = require('./transport/networkInterface');
var isUndefined = require('lodash.isundefined');
var assign = require('lodash.assign');
var isString = require('lodash.isstring');
var store_1 = require('./store');
var QueryManager_1 = require('./core/QueryManager');
var storeUtils_1 = require('./data/storeUtils');
var fragments_1 = require('./fragments');
var getFromAST_1 = require('./queries/getFromAST');
var DEFAULT_REDUX_ROOT_KEY = 'apollo';
function defaultReduxRootSelector(state) {
return state[DEFAULT_REDUX_ROOT_KEY];
}
var ApolloClient = function () {
function ApolloClient(_a) {
var _this = this;
var _b = _a === void 0 ? {} : _a,
networkInterface = _b.networkInterface,
reduxRootKey = _b.reduxRootKey,
reduxRootSelector = _b.reduxRootSelector,
initialState = _b.initialState,
dataIdFromObject = _b.dataIdFromObject,
resultTransformer = _b.resultTransformer,
resultComparator = _b.resultComparator,
_c = _b.ssrMode,
ssrMode = _c === void 0 ? false : _c,
_d = _b.ssrForceFetchDelay,
ssrForceFetchDelay = _d === void 0 ? 0 : _d,
_e = _b.mutationBehaviorReducers,
mutationBehaviorReducers = _e === void 0 ? {} : _e,
_f = _b.addTypename,
addTypename = _f === void 0 ? true : _f,
queryTransformer = _b.queryTransformer;
this.middleware = function () {
return function (store) {
_this.setStore(store);
return function (next) {
return function (action) {
var returnValue = next(action);
_this.queryManager.broadcastNewStore(store.getState());
return returnValue;
};
};
};
};
if (reduxRootKey && reduxRootSelector) {
throw new Error('Both "reduxRootKey" and "reduxRootSelector" are configured, but only one of two is allowed.');
}
if (reduxRootKey) {
console.warn('"reduxRootKey" option is deprecated and might be removed in the upcoming versions, ' + 'please use the "reduxRootSelector" instead.');
this.reduxRootKey = reduxRootKey;
}
if (queryTransformer) {
throw new Error('queryTransformer option no longer supported in Apollo Client 0.5. ' + 'Instead, there is a new "addTypename" option, which is on by default.');
}
if (!reduxRootSelector && reduxRootKey) {
this.reduxRootSelector = function (state) {
return state[reduxRootKey];
};
} else if (isString(reduxRootSelector)) {
this.reduxRootKey = reduxRootSelector;
this.reduxRootSelector = function (state) {
return state[reduxRootSelector];
};
} else if (typeof reduxRootSelector === 'function') {
this.reduxRootSelector = reduxRootSelector;
} else {
this.reduxRootSelector = null;
}
this.initialState = initialState ? initialState : {};
this.networkInterface = networkInterface ? networkInterface : networkInterface_1.createNetworkInterface({ uri: '/graphql' });
this.addTypename = addTypename;
this.resultTransformer = resultTransformer;
this.resultComparator = resultComparator;
this.shouldForceFetch = !(ssrMode || ssrForceFetchDelay > 0);
this.dataId = dataIdFromObject;
this.fieldWithArgs = storeUtils_1.storeKeyNameFromFieldNameAndArgs;
if (ssrForceFetchDelay) {
setTimeout(function () {
return _this.shouldForceFetch = true;
}, ssrForceFetchDelay);
}
this.reducerConfig = {
dataIdFromObject: dataIdFromObject,
mutationBehaviorReducers: mutationBehaviorReducers
};
this.watchQuery = this.watchQuery.bind(this);
this.query = this.query.bind(this);
this.mutate = this.mutate.bind(this);
this.setStore = this.setStore.bind(this);
this.resetStore = this.resetStore.bind(this);
}
ApolloClient.prototype.watchQuery = function (options) {
this.initStore();
if (!this.shouldForceFetch && options.forceFetch) {
options = assign({}, options, {
forceFetch: false
});
}
fragments_1.createFragment(options.query);
var fullDocument = getFromAST_1.addFragmentsToDocument(options.query, options.fragments);
var realOptions = Object.assign({}, options, {
query: fullDocument
});
delete realOptions.fragments;
return this.queryManager.watchQuery(realOptions);
};
;
ApolloClient.prototype.query = function (options) {
this.initStore();
if (!this.shouldForceFetch && options.forceFetch) {
options = assign({}, options, {
forceFetch: false
});
}
fragments_1.createFragment(options.query);
var fullDocument = getFromAST_1.addFragmentsToDocument(options.query, options.fragments);
var realOptions = Object.assign({}, options, {
query: fullDocument
});
delete realOptions.fragments;
return this.queryManager.query(realOptions);
};
;
ApolloClient.prototype.mutate = function (options) {
this.initStore();
var fullDocument = getFromAST_1.addFragmentsToDocument(options.mutation, options.fragments);
var realOptions = Object.assign({}, options, {
mutation: fullDocument
});
delete realOptions.fragments;
return this.queryManager.mutate(realOptions);
};
;
ApolloClient.prototype.subscribe = function (options) {
this.initStore();
var fullDocument = getFromAST_1.addFragmentsToDocument(options.query, options.fragments);
var realOptions = Object.assign({}, options, {
document: fullDocument
});
delete realOptions.fragments;
delete realOptions.query;
return this.queryManager.startGraphQLSubscription(realOptions);
};
ApolloClient.prototype.reducer = function () {
return store_1.createApolloReducer(this.reducerConfig);
};
ApolloClient.prototype.initStore = function () {
if (this.store) {
return;
}
if (this.reduxRootSelector) {
throw new Error('Cannot initialize the store because "reduxRootSelector" or "reduxRootKey" is provided. ' + 'They should only be used when the store is created outside of the client. ' + 'This may lead to unexpected results when querying the store internally. ' + "Please remove that option from ApolloClient constructor.");
}
this.setStore(store_1.createApolloStore({
reduxRootKey: DEFAULT_REDUX_ROOT_KEY,
initialState: this.initialState,
config: this.reducerConfig
}));
this.reduxRootKey = DEFAULT_REDUX_ROOT_KEY;
};
;
ApolloClient.prototype.resetStore = function () {
this.queryManager.resetStore();
};
;
ApolloClient.prototype.setStore = function (store) {
var reduxRootSelector;
if (this.reduxRootSelector) {
reduxRootSelector = this.reduxRootSelector;
} else {
reduxRootSelector = defaultReduxRootSelector;
this.reduxRootKey = DEFAULT_REDUX_ROOT_KEY;
}
if (isUndefined(reduxRootSelector(store.getState()))) {
throw new Error('Existing store does not use apolloReducer. Please make sure the store ' + 'is properly configured and "reduxRootSelector" is correctly specified.');
}
this.store = store;
this.queryManager = new QueryManager_1.QueryManager({
networkInterface: this.networkInterface,
reduxRootSelector: reduxRootSelector,
store: store,
addTypename: this.addTypename,
resultTransformer: this.resultTransformer,
resultComparator: this.resultComparator,
reducerConfig: this.reducerConfig
});
};
;
return ApolloClient;
}();
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = ApolloClient;
//# sourceMappingURL=ApolloClient.js.map
```
Any and all comments I might learn from are appreciated. Thank you.
One way to do this would be to use webpack like this:
const webpack = require('webpack');
const path = require('path');
module.exports = {
// watch: true,
entry: {
ApolloClient: './config/ApolloClient.js',
createNetworkInterface: './config/createNetworkInterface.js',
Redux: './config/Redux.js',
},
output: {
path: path.join(__dirname, 'build/Libs'),
filename: '[name].js',
library: '[name]',
libraryTarget: 'commonjs',
},
module: {
rules: [
{
use: 'babel-loader',
test: /\.js$/,
exclude: /node_modules/,
},
],
},
plugins: [
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
}),
],
};
Then in the config directory you could have:
/* ApolloClient.js */
import { ApolloClient } from 'apollo-client';
export default ApolloClient;
and
/* createNetworkInterface.js */
import { createNetworkInterface } from 'apollo-client/transport/networkInterface';
export default createNetworkInterface;
plus if you want to have Redux as well:
/* Redux.js */
import * as Redux from 'redux';
export default Redux;
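With those three entry files in place, you then run webpack against this config to produce the bundles; something like npx webpack --config webpack.config.js should work, assuming webpack and babel-loader are installed as devDependencies and the config file name matches. The output lands in build/Libs/ as ApolloClient.js, createNetworkInterface.js and Redux.js, which is what the require() calls below expect.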
However I was not able to get gql done this way and had to use bolav's fusepm.
You would use it exactly as bolav mentioned: first install it globally with npm install -g fusepm, and then run fusepm npm graphql-tag.
Once you have all these in place, you can require them as follows:
var Redux = require('build/Libs/Redux');
var ApolloClient = require('build/Libs/ApolloClient');
var createNetworkInterface = require('build/Libs/createNetworkInterface');
var gql = require('fusejs_lib/graphql-tag_graphql-tag.umd');
This approach could still use some TLC, but for now it works and gets the job done:
var networkInterface = createNetworkInterface.createNetworkInterface({
uri: 'http://localhost:8000/graphql',
});
var client = new ApolloClient.ApolloClient({
networkInterface,
});
client.query({
query: gql`
query {
allPosts {
edges {
node {
id
headline
summary(length: 80)
body
createdAt
updatedAt
personByAuthorId {
firstName
lastName
}
}
}
}
}
`,
})
.then(data => data.data.allPosts.edges.forEach(node => pages.add(createPage(node))))
.catch(error => console.log(error));
Also, if you like, I've set up a whole project along with a server that might be of interest to you: fuseR
I made fusepm, which has a mode to convert npm modules so that they run under FuseTools. It still has a lot of bugs, but at least I managed to get further than you did:
fuse create app apolloc
cd apolloc
npm install apollo-client
fusepm npm apollo-client
And then in your JavaScript:
<JavaScript>
var ApolloClient = require('fusejs_lib/apollo-client.js');
</JavaScript>
fusepm uses Babel, with some custom plugins.
I am trying to copy a folder using the Node fs module. I am familiar with the readFileSync() and writeFileSync() methods, but I am wondering what method I should use to copy a specified folder.
You can use fs-extra to copy the contents of one folder to another, like this:
var fs = require("fs-extra");
fs.copy('/path/to/source', '/path/to/destination', function (err) {
if (err) return console.error(err)
console.log('success!')
});
There's also a synchronous version.
fs.copySync('/path/to/source', '/path/to/destination')
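Newer releases of fs-extra also return a promise when you omit the callback, so (assuming one of those versions) the same copy can be written with async/await:
const fs = require('fs-extra');

async function copyFolder() {
  try {
    // copy() returns a promise when no callback is passed
    await fs.copy('/path/to/source', '/path/to/destination');
    console.log('success!');
  } catch (err) {
    console.error(err);
  }
}

copyFolder();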
Save yourself the extra dependency with just 10 lines of native Node functions.
Add the following copyDir function:
const { promises: fs } = require("fs")
const path = require("path")
async function copyDir(src, dest) {
await fs.mkdir(dest, { recursive: true });
let entries = await fs.readdir(src, { withFileTypes: true });
for (let entry of entries) {
let srcPath = path.join(src, entry.name);
let destPath = path.join(dest, entry.name);
entry.isDirectory() ?
await copyDir(srcPath, destPath) :
await fs.copyFile(srcPath, destPath);
}
}
And then use like this:
copyDir("./inputFolder", "./outputFolder")
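Note that copyDir is async, so the call above just kicks off a promise; in real use you'd probably want to await it or attach handlers so errors aren't silently swallowed:
copyDir("./inputFolder", "./outputFolder")
  .then(() => console.log("copy complete"))
  .catch((err) => console.error(err));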
Further Reading
Copy folder recursively in node.js
fsPromises.copyFile (added in v10.11.0)
fsPromises.readdir (added in v10.0)
fsPromises.mkdir (added in v10.0)
You might want to check out the ncp package. It does exactly what you're trying to do: recursively copy files from one path to another.
Here's something to get you started:
const fs = require("fs");
const path = require("path");
const ncp = require("ncp").ncp;
// No limit, because why not?
ncp.limit = 0;
var thePath = "./";
var folder = "testFolder";
var newFolder = "newTestFolder";
ncp(path.join(thePath, folder), path.join(thePath, newFolder), function (err) {
if (err) {
return console.error(err);
}
console.log("Done !");
});
I liked KyleMit's answer, but thought a parallel version would be preferable.
The code is in TypeScript. If you need JavaScript, just delete the : string type annotations from the copyDirectory declaration.
import { promises as fs } from "fs"
import path from "path"
export const copyDirectory = async (src: string, dest: string) => {
const [entries] = await Promise.all([
fs.readdir(src, { withFileTypes: true }),
fs.mkdir(dest, { recursive: true }),
])
await Promise.all(
entries.map((entry) => {
const srcPath = path.join(src, entry.name)
const destPath = path.join(dest, entry.name)
return entry.isDirectory()
? copyDirectory(srcPath, destPath)
: fs.copyFile(srcPath, destPath)
})
)
}
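Usage is the same as KyleMit's version, just from an async context, e.g. await copyDirectory("./inputFolder", "./outputFolder") (the folder names are placeholders).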
Here's a synchronous version of KyleMit's answer:
const fs = require("fs");
const path = require("path");

function copyDirectory(source, destination) {
  fs.mkdirSync(destination, { recursive: true });

  fs.readdirSync(source, { withFileTypes: true }).forEach((entry) => {
    const sourcePath = path.join(source, entry.name);
    const destinationPath = path.join(destination, entry.name);

    entry.isDirectory()
      ? copyDirectory(sourcePath, destinationPath)
      : fs.copyFileSync(sourcePath, destinationPath);
  });
}
There is also a more elegant syntax available: the pwd-fs module.
const FileSystem = require('pwd-fs');
const pfs = new FileSystem();

// Wrap in an immediately-invoked async function so the copy actually runs
(async () => {
  await pfs.copy('./path', './dest');
})();