webpack multicompile via node api (^4.44.2) - node.js

I have a Webpack configuration repository to separate the application boilerplate from the Webpack configuration. This application is dependent on the repository where the Webpack configuration is. I made a binary in the Webpack repository so that, in the application, I could compile and run this configuration in development. The configuration combines a common configuration with the past environment.
Problem: The setup looks great, but the compile-and-serve part of the application does not work. My configurations themselves appear to be fine — I isolated them and tested them separately. I am also following the v4 documentation for the Node API. The configuration merges a common configuration with the passed environment.
As I have nowhere else to turn — apologies if this is not the right platform — I am studying how to compile different configurations of the same application (boilerplate) using webpack.
Link to the code.
I would appreciate a link to an example repository.

I came across several problems pointing to the Libs of the Webpack and Webpack-dev-server packages. However, today I got what I wanted. I will share here for future reference from other users.
My goal was to be able to trigger the development or production environment from a node script. Involving the construction of multiple FrontEnd applications, which was abstracted in webpack.config.
Now I can run `mycli development` in the shell, and this will trigger the build of the configuration for that environment.
// mycli.js
#!/usr/bin/env node
const webpack = require('webpack')
const WebpackDevServer = require('webpack-dev-server')
const webpackConfiguration = require('./webpack/webpack.config')
const args = (process.argv && process.argv.length > 2) ? process.argv.slice(2) : []
const mode = args.length > 0 ? args[0] : 'development'
const config = webpackConfiguration(mode)
/**
* Minimum webpack configuration for cli validation.
* #see {#link webpackConfiguration} to further information
*/
const minConfig = {
entry: __dirname + '/test.js',
mode,
output: {
path: '/dist',
filename: 'bundle.js'
}
}
/** #type {WebpackCompilerInstance} compiler */
const compiler = webpack(minConfig)
switch (config.mode) {
case 'development':
/**
* Recommended by documentation:
* "If you're using dev-server through the Node.js API,
* the options in devServer will be ignored. Pass the options
* as a second parameter instead."
* #see {#link https://v4.webpack.js.org/configuration/dev-server/#devserver} for further information.
* #see {#link https://github.com/webpack/webpack-dev-server/tree/master/examples/api/simple} for example
*/
const devServerConfig = config.devserver;
if (config) delete config.devserver
const devServerOptions = Object.assign({}, devServerConfig, {
open: true,
stats: {
colors: true,
},
})
const devserverCallback = (err) => {
if (err) throw err
console.log('webpack-dev-server listening...')
}
new WebpackDevServer(compiler, devServerOptions).listen(devServerConfig.port, devServerConfig.host, devserverCallback)
break;
case 'production':
const compilerCallback = (err, stats) => {
console.log(stats, err)
if (err) throw err
process.stdout.write(`Stats: \n${stats} \n`)
console.log('Compiler has finished execution.')
}
compiler.run(compilerCallback)
break;
default:
console.error('No matching mode. Try "development" or "production".')
break;
}

Related

NodeJS + WebPack setting client static data

I have a NodeJS/React/WebPack application that I'm trying to take environment variables that are present at build time and export them as variables that are available to the client without having to request them with AJAX.
What is currently setup is a /browser/index.js file with a method that is exported however the variables are not getting expanded when webpack runs.
// Reads build-time configuration on the client side.
// NOTE(review): `process.env.*` only has values in a browser bundle if webpack
// substitutes them at build time (e.g. via DefinePlugin/EnvironmentPlugin) —
// otherwise these lookups are undefined at runtime. Confirm the webpack config.
function applicationSetup()
{
// assumes CONFIG holds a JSON string set at build time — TODO confirm
const config = JSON.parse(process.env.CONFIG);
const APPLICATION_ID = process.env.APPLICATION_ID;
.........
}
During the build process we run node node_modules/webpack/bin/webpack.js --mode production with npm.
What do I need to do in order to expand the environment variable to be their actual values when webpack creates the .js file?
Edit 8/23
I've tried adding it in the webpack.DefinePlugin section of the webpack.config.js file however it's still doesn't seem to be available in the client side code. What am I missing?
Edit #2 (webpack.config.js)
const getClientConfig = (env, mode) => {
return {
plugins: [
new webpack.DefinePlugin({
__isBrowser__: 'false',
__Config__: process.env.CONFIG,
__ApplicationID__:process.env.APPLICATION_ID
})]
}
module.exports = (env, options) => {
const configs = [
getClientConfig(options.env, options.mode)
];
return configs;
};

Gatsby preview server in a serverless/stateless environment

Gatsby has documentation on how to setup a preview server here
Problem is it requires a server running 24/7 listening to requests, I would like to achieve the same result but in a serverless setup (AWS Lambda to be more specific) since we would need a preview very rarely.
The context here is using Gatsby with Wordpress as a headless data backend, and I want to implement a custom preview link in Wordpress for previewing posts before publishing them.
So far, there are two main setbacks :
Size, currently the size of node_modules for a Gatsby starter with Wordpress is 570mb
Speed, stateless means every preview request would be running gatsby develop again
I honestly don't know a solution for size here, not sure how to strip down packages.
As for speed, maybe there's a low level Gatsby API function to directly render a page to HTML? For example, a Node.js Lambda code could look like this (buildPageHTML is a hypothetical function I'm trying to find)
import buildPageHTML from "gatsby"
exports.handler = async function(event) {
const postID = event.queryStringParameters.postID
return buildPageHTML(`/preview_post_by_id/${postID}`)
}
Any ideas on how to go on about this?
Running Gatsby in an AWS Lambda
Try this lamba (from this beautiful tutorial) :
import { Context } from 'aws-lambda';
import { link } from 'linkfs';
import mock from 'mock-require';
import fs from 'fs';
import { tmpdir } from 'os';
import { runtimeRequire } from '#/utility/runtimeRequire.utility';
import { deployFiles } from '#/utility/deployFiles.utility';
/* -----------------------------------
*
* Variables
*
* -------------------------------- */
const tmpDir = tmpdir();
/* -----------------------------------
*
* Gatsby
*
* -------------------------------- */
// Runs the Gatsby production build and reports the outcome to the Lambda
// context (succeed on deploy, fail on any build/deploy error).
function invokeGatsby(context: Context) {
  const gatsby = runtimeRequire('gatsby/dist/commands/build');
  const buildOptions = {
    directory: __dirname,
    verbose: false,
    browserslist: ['>0.25%', 'not dead'],
    sitePackageJson: runtimeRequire('./package.json'),
  };
  gatsby(buildOptions)
    .then(deployFiles)
    .then(context.succeed)
    .catch(context.fail);
}
/* -----------------------------------
*
* Output
*
* -------------------------------- */
// Redirects Gatsby's writable directories (.cache, public) into the Lambda
// tmp dir, since the deployment package itself is read-only.
function rewriteFs() {
  const mappings = [
    [`${__dirname}/.cache`, `${tmpDir}/.cache`],
    [`${__dirname}/public`, `${tmpDir}/public`],
  ];
  const linkedFs = link(fs, mappings);
  // Stream constructors are not carried over by linkfs; restore the originals.
  linkedFs.ReadStream = fs.ReadStream;
  linkedFs.WriteStream = fs.WriteStream;
  mock('fs', linkedFs);
}
/* -----------------------------------
*
* Handler
*
* -------------------------------- */
// Lambda entry point: remap the filesystem first, then kick off the build.
exports.handler = function handler(event: any, context: Context) {
  rewriteFs();
  invokeGatsby(context);
};
find the source here

Determine dependency's greatest matching version that exists on an NPM server from a semver version

I'm writing a node script which helps pin dependencies.
How can I determine the greatest realized version of a package existing on an NPM server, from a semver version?
For example, we have a dependency "foo" which is specified in a package.json as ~1.2.3.
Out on NPM, there exists published version 1.2.5, which is the latest published version compatible with ~1.2.3.
I need to write a script that would take as input "foo" and ~1.2.3, then after a server query, return 1.2.5. Something like this:
await fetchRealizedVersion('foo', '~1.2.3'); // resolves to 1.2.5
I understand I could do something like yarn upgrade and then parse the lock file, but I am looking for a more direct way of accomplishing this.
Hopefully there is a package that boils this down to an API call, but I'm not finding anything after googling around.
"Hopefully there is a package that boils this down to an API call,"
Short Answer: Unfortunately no, there is not a package that currently exists as far as I know.
Edit: There is the get-latest-version package you may want to try:
Basic usage:
// Resolve the highest published version of a package satisfying a semver range.
const getLatestVersion = require('get-latest-version')
getLatestVersion('some-other-module', {range: '^1.0.0'})
.then((version) => console.log(version)) // highest version matching ^1.0.0 range
.catch((err) => console.error(err))
Alternatively, consider utilizing/writing a custom node.js module to perform the following steps:
Either:
Shell out the npm view command to retrieve all versions that are available in the NPM registry for a given package: For instance:
npm view <pkg> versions --json
Or, directly make a https request to the public npm registry at https://registry.npmjs.org to retrieve all versions available in for a given package.
Parse the JSON returned and pass it, along with the semver range (e.g. ~1.2.3), to the node-semver package's maxSatisfying() method.
The maxSatisfying() method is described in the docs as:
maxSatisfying(versions, range): Return the highest version in the list that satisfies the range, or null if none of them do.
Custom module (A):
The custom example module provided in get-latest-version.js (below) essentially performs the aforementioned steps. In this example we shell out the npm view command.
get-latest-version.js
'use strict';
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const { exec } = require('child_process');
const { maxSatisfying } = require('semver');
//------------------------------------------------------------------------------
// Data
//------------------------------------------------------------------------------
const errorBadge = '\x1b[31;40mERR!\x1b[0m';
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
/**
* Captures the data written to stdout from a given shell command.
*
* #param {String} command The shell command to execute.
* #return {Promise<string>} A Promise object whose fulfillment value holds the
* data written to stdout. When rejected an error message is returned.
* #private
*/
/**
 * Runs a shell command and captures what it writes to stdout.
 *
 * @param {String} command The shell command to execute.
 * @return {Promise<string>} Fulfills with the trimmed stdout; rejects with an
 * Error when the command exits non-zero or cannot be started.
 * @private
 */
function shellExec(command) {
  return new Promise((resolve, reject) => {
    exec(command, (error, stdout) => {
      if (error) {
        reject(new Error(`Failed executing command: '${command}'`));
      } else {
        resolve(stdout.trim());
      }
    });
  });
}
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
module.exports = {
/**
* Retrieves the latest version that matches the given range for a package.
*
* #async
* #param {String} pkg The package name.
* #param {String} range The semver range.
* #returns {Promise<string>} A Promise object that when fulfilled returns the
* latest version that matches. When rejected an error message is returned.
*/
async fetchRealizedVersion(pkg, range) {
try {
const response = await shellExec(`npm view ${pkg} versions --json`);
const versions = JSON.parse(response);
return maxSatisfying(versions, range);
} catch ({ message: errorMssg }) {
throw Error([
`${errorBadge} ${errorMssg}`,
`${errorBadge} '${pkg}' is probably not in the npm registry.`
].join('\n'));
}
}
};
Usage:
The following index.js demonstrates using the aforementioned module.
index.js
'use strict';

const { fetchRealizedVersion } = require('./get-latest-version.js');

// Demo: print the highest eslint version compatible with ~5.15.0.
(async () => {
  try {
    const latest = await fetchRealizedVersion('eslint', '~5.15.0');
    console.log(latest); // --> 5.15.3
  } catch ({ message: errMssg }) {
    console.error(errMssg);
  }
})();
As you can see, in that example we obtain the latest published version for the eslint package that is compatible with the semver tilde range ~5.15.0.
The latest/maximum version that satisfies ~5.15.0 is printed to the console:
$ node ./index.js
5.15.3
Note: You can always double check the results using the online semver calculator which actually utilizes the node-semver package.
Another Usage Example:
The following index.js demonstrates using the aforementioned module to obtain the latest/maximum version for multiple packages and different ranges.
index.js
'use strict';

const { fetchRealizedVersion } = require('./get-latest-version.js');

// Package/range pairs to resolve against the npm registry.
const criteria = [
  { pkg: 'eslint', range: '^4.9.0' },
  { pkg: 'eslint', range: '~5.0.0' },
  { pkg: 'lighthouse', range: '~1.0.0' },
  { pkg: 'lighthouse', range: '^1.0.4' },
  { pkg: 'yarn', range: '~1.3.0' },
  { pkg: 'yarn', range: '^1.3.0' },
  { pkg: 'yarn', range: '^20.3.0' },
  { pkg: 'quuxbarfoo', range: '~1.3.0' }
];

(async () => {
  // Kick off every lookup in parallel; a failure resolves to its message
  // so one bad package does not abort the whole run.
  const pending = criteria.map(({ pkg, range }) =>
    fetchRealizedVersion(pkg, range).catch((err) => err.message)
  );
  // Print the results in the original request order.
  for (const result of pending) {
    console.log(await result);
  }
})();
The result for that last example is as follows:
$ node ./index.js
4.19.1
5.0.1
1.0.6
1.6.5
1.3.2
1.22.4
null
ERR! Failed executing command: 'npm view quuxbarfoo versions --json'
ERR! 'quuxbarfoo' is probably not in the npm registry.
Additional Note: The shellExec helper function in get-latest-version.js currently promisifies the child_process module's exec() method to shell out the npm view command. However, since node.js version 12 the built-in util.promisify provides another way to promisify the exec() method (as shown in the docs for exec), so you may prefer to do it that way instead.
Custom module (B):
If you wanted to avoid shelling out the npm view command you could consider making a request directly to the https://registry.npmjs.org endpoint instead (which is the same endpoint that the npm view command sends a https GET request to).
The modified version of get-latest-version.js (below) essentially utilizes a promisified version of the builtin https.get.
Usage is the same as demonstrated previously in the "Usage" section.
get-latest-version.js
'use strict';
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const https = require('https');
const { maxSatisfying } = require('semver');
//------------------------------------------------------------------------------
// Data
//------------------------------------------------------------------------------
const endPoint = 'https://registry.npmjs.org';
const errorBadge = '\x1b[31;40mERR!\x1b[0m';
//------------------------------------------------------------------------------
// Helpers
//------------------------------------------------------------------------------
/**
* Requests JSON for a given package from the npm registry.
*
* #param {String} pkg The package name.
* #return {Promise<json>} A Promise object that when fulfilled returns the JSON
* metadata for the specific package. When rejected an error message is returned.
* #private
*/
/**
 * Requests the full metadata JSON for a given package from the npm registry.
 *
 * @param {String} pkg The package name.
 * @return {Promise<string>} Fulfills with the raw JSON body for the package.
 * Rejects with an Error on network failure, non-200 status, or a non-JSON
 * content type.
 * @private
 */
function fetchPackageInfo(pkg) {
  return new Promise((resolve, reject) => {
    https.get(`${endPoint}/${pkg}/`, response => {
      const { statusCode, headers: { 'content-type': contentType } } = response;
      if (statusCode !== 200) {
        // BUGFIX: drain the unread response so the socket is released — per
        // the Node.js https/http docs an unconsumed response leaks memory.
        response.resume();
        reject(new Error(`Request to ${endPoint} failed. ${statusCode}`));
        return;
      }
      if (!/^application\/json/.test(contentType)) {
        response.resume();
        reject(new Error(`Expected application/json but received ${contentType}`));
        return;
      }
      let data = '';
      response.on('data', chunk => {
        data += chunk;
      });
      response.on('end', () => {
        resolve(data);
      });
    }).on('error', error => {
      // NOTE(review): the underlying network error is discarded here;
      // consider forwarding it (e.g. via { cause }) for easier debugging.
      reject(new Error(`Cannot find ${endPoint}`));
    });
  });
}
//------------------------------------------------------------------------------
// Public Interface
//------------------------------------------------------------------------------
module.exports = {
/**
* Retrieves the latest version that matches the given range for a package.
*
* #async
* #param {String} pkg The package name.
* #param {String} range The semver range.
* #returns {Promise<string>} A Promise object that when fulfilled returns the
* latest version that matches. When rejected an error message is returned.
*/
async fetchRealizedVersion(pkg, range) {
try {
const response = await fetchPackageInfo(pkg);
const { versions: allVersionInfo } = JSON.parse(response);
// The response includes all metadata for all versions of a package.
// Let's create an Array holding just the `version` info.
const versions = [];
Object.keys(allVersionInfo).forEach(key => {
versions.push(allVersionInfo[key].version)
});
return maxSatisfying(versions, range);
} catch ({ message: errorMssg }) {
throw Error([
`${errorBadge} ${errorMssg}`,
`${errorBadge} '${pkg}' is probably not in the npm registry.`
].join('\n'));
}
}
};
Note The version of node-semver used in the example custom modules (A & B) IS NOT the current latest version (i.e. 7.3.2). Version ^5.7.1 was used instead - which is the same version used by the npm cli tool.

How can I control PhantomJS in WebdriverIO to skip download / load some kind of resource (i.e., fonts, images, css, js)?

Below is my code; currently this works fine, but I want to optimize it so it does not load/download some resources (fonts, images, CSS, JS). I've read the API docs but I'm not able to find the related configs. I'm using WebdriverIO with PhantomJS as the browser.
'use strict';
var _ = require('lodash');
var webdriverio = require('webdriverio');
var cheerio = require('cheerio');
/**
* Base class for browser based crawler.
* To run this crawler you need to first run phantomJS with webdriver on localhost
* ```
* ./phantomjs --webdriver 4444
* ```
*/
class BaseWebdriverIO {
/**
 * @param {Object} [opts] webdriverio config, see
 * http://webdriver.io/guide/getstarted/configuration.html
 * Missing keys are filled with defaults targeting PhantomJS.
 */
constructor(opts) {
this.opts = _.defaults(opts || {}, {
desiredCapabilities: {
browserName: 'phantomjs'
}
});
}
/**
 * Opens a remote webdriver session, loads the given URL, waits for the
 * body to be visible, and parses the body HTML with cheerio.
 *
 * @param {String} parseUrl The URL to load and parse.
 * @returns {Promise} The webdriverio command chain.
 * NOTE(review): in this legacy chained webdriverio API, `this` inside the
 * getHTML callback appears to be the webdriverio client (hence this.end()
 * ending the session) — confirm against the webdriverio version in use.
 */
parse(parseUrl) {
console.log("getting url", parseUrl);
return webdriverio.remote(this.opts)
.init()
.url(parseUrl)
.waitForVisible('body')
.getHTML('body', false, function(err, html) {
if (err) {
throw new Error(err);
}
// Close the browser session before handing back the parsed document.
this.end();
return cheerio.load(html);
});
}
}
module.exports = BaseWebdriverIO;
I'm not able to find any documentation related this.
Can anyone tell me, How can I do that?
Edit/Update: I've found a working example which optimize images to not load by using setting phantomjs.cli.args from here: https://github.com/angular/protractor/issues/150#issuecomment-128109354 Some basic settings have been configured and works fine though, this is the modified desiredCapabilities settings object:
desiredCapabilities: {
'browserName': 'phantomjs',
'phantomjs.binary.path': require('phantomjs').path,
'phantomjs.cli.args': [
'--ignore-ssl-errors=true',
'--ssl-protocol=any', // tlsv1
'--web-security=false',
'--load-images=false',
//'--debug=false',
//'--webdriver-logfile=webdriver.log',
//'--webdriver-loglevel=DEBUG',
],
javascriptEnabled: false,
logLevel: 'verbose'
}
For CSS/font optimization, I found a question on Stack Overflow, "How can I control PhantomJS to skip download some kind of resource?", and the solution discussed there is something like this:
// PhantomJS page hook: invoked before each resource request is issued.
page.onResourceRequested = function(requestData, request) {
// Abort requests for stylesheet URLs or responses declared as text/css.
// NOTE(review): the /g flag makes .test() stateful (lastIndex advances
// between calls), which can skip matches — consider dropping /g.
if ((/http:\/\/.+?\.css/gi).test(requestData['url']) || requestData['Content-Type'] == 'text/css') {
console.log('The url of the request is matching. Aborting: ' + requestData['url']);
// request.abort();
request.cancel();
}
};
But I 'm not able trigger this function via in webdriverIO's configs desiredCapabilities object.. i.e., onResourceRequested()..
Can anyone tell me how can i call/define this function in my WebdriverIO script capabilities or any other way? Thanks.

What is the correct way to load polyfills and shims with Browserify

I'm building a web app and I'm getting to know and love Browserify. One thing has bugged me though.
I'm using some ES6 features that need to be shimmed/polyfilled in older browsers such as es6-promise and object-assign (packages on npm).
Currently I'm just loading them into each module that needs them:
var assign = require('object-assign');
var Promise = require('es6-promise');
I know this is definitely not the way to go. It is hard to maintain and I would like to use the ES6 features transparently instead of having to "depend" on them via requires.
What's the definitive way to load shims like these? I've seen several examples around the internet but they're all different. I could:
load them externally:
var bundle = browserify();
bundle.require('s6-promise');
// or should I use it bundle.add to make sure the code is runned???
The problem I have here is that I don't know which order the modules
will be loaded in in the browser. So the polyfilling might not have happened
yet at call sites that need the polyfilled functionality.
This has the additional downside that backend code cannot benefit from these
polyfills (unless I'm missing something).
use browserify-shim or something similar. I don't really see how this would work for ES6 features.
manually set up the polyfilling:
Object.assign = require('object-assign');
Don't require polyfills in your modules, that's an anti-pattern. Your modules should assume that the runtime is patched (when needed), and that should be part of the contract. A good example of this is ReactJS, where they explicitly define the minimum requirement for the runtime so that the library can work: http://facebook.github.io/react/docs/working-with-the-browser.html#browser-support-and-polyfills
You could use a polyfill service (e.g.: https://cdn.polyfill.io/) to include an optimized script tag at the top of your page to guarantee that the runtime is patched correctly with the pieces you need, while modern browsers will not get penalized.
This is the method that I'm using. The key is that you have to export your polyfill properly at the top of your main entry file.
The following won't work:
// Using ES6 imports
import './polyfill';
// Using CommonJS style
require('./polyfill');
... // rest of your code goes here
You actually need to export the polyfill:
// Using ES6 export
export * from './polyfill';
// Using CommonJS style
var polyfill = require('./polyfill');
... // rest of your code goes here
Your polyfills will load correctly if you do either of the latter methods.
Below you can find examples of my polyfills.
polyfill.js:
import './polyfill/Array.from';
import './polyfill/Object.assign';
Object.assign:
// Polyfill: install Object.assign only on runtimes that lack it (pre-ES2015).
if (typeof Object.assign !== 'function') {
  (function iife() {
    // Cache the prototype method so shadowed hasOwnProperty can't bite us.
    const ObjectHasOwnProperty = Object.prototype.hasOwnProperty;
    /**
     * Copy the values of all enumerable own properties from one source
     * object to a target object. It will return the target object.
     * @param {Object} target The target object.
     * @param {Object} source The source object.
     * @return {Object} The target object.
     */
    function shallowAssign(target, source) {
      if (target === source) return target;
      Object.keys(source).forEach((key) => {
        // Avoid bugs when hasOwnProperty is shadowed
        if (ObjectHasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      });
      return target;
    }
    /**
     * Copy the values of all enumerable own properties from each source
     * object to a target object. Returns the target object.
     * @param {Object} target The target object.
     * @param {...Object} sources The source objects.
     * @return {Object} The target object.
     * @throws {TypeError} When target is null or undefined.
     */
    Object.assign = function assign(target, ...sources) {
      if (target === null || target === undefined) {
        throw new TypeError('Cannot convert undefined or null to object');
      }
      sources.forEach((source) => {
        // BUGFIX: skip BOTH null and undefined sources, matching the intent
        // of the original comment. The old `source !== null` check let
        // undefined through, silently relying on Object(undefined) being {}.
        if (source !== null && source !== undefined) {
          shallowAssign(Object(target), Object(source));
        }
      });
      return target;
    };
  }());
}
One solution that worked for me was to use bundle.add
I split my bundle in 2 parts, app.js for app code, and appLib.js for libraries (this one will be cached as it does not change oftenly).
See https://github.com/sogko/gulp-recipes/tree/master/browserify-separating-app-and-vendor-bundles
For appLibs.js I use bundle.add for polyfills, as they must be loaded when the script is loaded, while I use bundle.require for other libs, that will be loaded only when required inside app.js.
polyfills.forEach(function(polyfill) {
b.add(polyfill);
});
libs.forEach(function(lib) {
b.require(lib);
});
The page loads these 2 bundles in order:
<head>
...
<script type="text/javascript" src="appLibs.js" crossorigin></script>
<script type="text/javascript" src="app.js" crossorigin></script>
...
</head>
This way it seems safe to assume that all polyfills will be loaded even before other libs are initialized. Not sure it's the best option but it worked for me.
My complete setup:
"use strict";
var browserify = require('browserify');
var gulp = require('gulp');
var gutil = require('gulp-util');
var handleErrors = require('../util/handleErrors');
var source = require('vinyl-source-stream');
var watchify = require("watchify");
var livereload = require('gulp-livereload');
var gulpif = require("gulp-if");
var buffer = require('vinyl-buffer');
var uglify = require('gulp-uglify');
// polyfills should be automatically loaded, even if they are never required
var polyfills = [
"intl"
];
var libs = [
"ajax-interceptor",
"autolinker",
"bounded-cache",
"fuse.js",
"highlight.js",
"imagesloaded",
"iscroll",
"jquery",
"keymaster",
"lodash",
"medium-editor",
"mime-db",
"mime-types",
"moment",
"packery",
"q",
"rangy",
"spin.js",
"steady",
"store",
"string",
"uuid",
"react-dnd"
];
// permits to create a special bundle for vendor libs
// See https://github.com/sogko/gulp-recipes/tree/master/browserify-separating-app-and-vendor-bundles
// Builds the vendor bundle (appLibs.js). Polyfills go through b.add so they
// run as soon as the bundle loads; libraries go through b.require so app.js
// can resolve them at runtime without re-bundling them.
gulp.task('browserify-libs', function () {
var b = browserify({
debug: true
});
polyfills.forEach(function(polyfill) {
b.add(polyfill);
});
libs.forEach(function(lib) {
b.require(lib);
});
return b.bundle()
.on('error', handleErrors)
.pipe(source('appLibs.js'))
// TODO use node_env instead of "global.buildNoWatch"
// Buffer + uglify only for non-watch (production-style) builds.
.pipe(gulpif(global.buildNoWatch, buffer()))
.pipe(gulpif(global.buildNoWatch, uglify()))
.pipe(gulp.dest('./build'));
});
// Inspired by http://truongtx.me/2014/08/06/using-watchify-with-gulp-for-fast-browserify-build/
// Builds the application bundle (app.js) after cleaning and after the vendor
// bundle exists; vendor libs are marked external so they are not duplicated.
gulp.task('browserify',['cleanAppJs','browserify-libs'],function browserifyShare(){
var b = browserify({
// cache/packageCache appear to be required for watchify — confirm against
// the watchify README for the version in use.
cache: {},
packageCache: {},
fullPaths: true,
extensions: ['.jsx'],
paths: ['./node_modules','./src/'],
debug: true
});
b.transform('reactify');
// Keep vendor libs out of this bundle; they are served from appLibs.js.
libs.forEach(function(lib) {
b.external(lib);
});
// TODO use node_env instead of "global.buildNoWatch"
if ( !global.buildNoWatch ) {
b = watchify(b);
b.on('update', function() {
gutil.log("Watchify detected change -> Rebuilding bundle");
return bundleShare(b);
});
}
b.on('error', handleErrors);
//b.add('app.js'); // It seems to produce weird behaviors when both using "add" and "require"
// expose does not seem to work well... see https://github.com/substack/node-browserify/issues/850
b.require('app.js',{expose: 'app'});
return bundleShare(b);
});
// Bundles `b` into build/app.js; in watch mode, also pings livereload.
function bundleShare(b) {
  const stream = b.bundle()
    .on('error', handleErrors)
    .pipe(source('app.js'))
    .pipe(gulp.dest('./build'));
  // TODO use node_env instead of "global.buildNoWatch"
  return stream.pipe(gulpif(!global.buildNoWatch, livereload()));
}
As you can see, polyfills are force-included with bundle.add so they execute on load, while regular libraries are registered with bundle.require.
Or use the polyfill service at
https://cdn.polyfill.io/v2/docs/

Resources