Convert Node.js app to exe with nexe failed? - node.js

I have a Node.js app that runs a local server on port 8080 and delivers an index.html file.
I need to convert the Node.js app into an exe, for a one-click run. I have been trying to convert it to an exe with the nexe module, but the created exe doesn't run and crashes. I don't know where I am going wrong, or whether I should try other modules.
Please guide me on this.
Here are the required files and my attempts.
package.json file
{
  "name": "node-web-example",
  "version": "1.0.0",
  "description": "",
  "main": "bundle.js",
  "scripts": {
    "start": "npm run bundle-js | http-server",
    "bundle-js": "watchify main.js -o bundle.js",
    "build": "nexe -r bundle.js -r index.html -r style.css -o MyApplication-1.exe -t x86-8.0.0"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "dotenv": "^8.2.0",
    "uneeq-js": "^2.35.0"
  },
  "devDependencies": {
    "browserify": "^16.5.0",
    "nexe": "^4.0.0-beta.18",
    "watchify": "^3.11.1"
  }
}
To build the exe I used the command npm run build, but it did not give me a working exe.
Here is the error trace when the exe is run from the terminal:
var n=V(105),r=V(106),i=V(83);function o(){return a.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function s(e,t){if(o()<t)throw new RangeError("Invalid typed array length");return a.TYPED_ARRAY_SUPPORT?(e=new Uint8Array(t)).__proto__=a.prototype:(null===e&&(e=new a(t)),e.length=t),e}function a(e,t,V){if(!(a.TYPED_ARRAY_SUPPORT||this instanceof a))return new a(e,t,V);if("number"==typeof e){if("string"==typeof t)throw new Error("If encoding is specified then the first argument must be a string");return h(this,e)}return c(this,e,t,V)}function c(e,t,V,n){if("number"==typeof t)throw new TypeError('"value" argument must not be a number');return"undefined"!=typeof ArrayBuffer&&t instanceof ArrayBuffer?function(e,t,V,n){if(t.byteLength,V<0||t.byteLength<V)throw new RangeError("'offset' is out of bounds");if(t.byteLength<V+(n||0))throw new RangeError("'length' is out of bounds");t=void 0===V&&void 0===n?new Uint8Array(t):void 0===n?new Uint8Array(t,V):new Uint8Array(t,V,n);
ReferenceError: window is not defined
at Module.n.__awaiter.V (D:\Uneeq Demo\examples\web\node\bundle.js:192:140642)
at V (D:\Uneeq Demo\examples\web\node\bundle.js:170:158)
at Object.e.exports (D:\Uneeq Demo\examples\web\node\bundle.js:185:23055)
at V (D:\Uneeq Demo\examples\web\node\bundle.js:170:158)
at Object.setPrototypeOf.__proto__ (D:\Uneeq Demo\examples\web\node\bundle.js:170:957)
at Object.__dirname.2 (D:\Uneeq Demo\examples\web\node\bundle.js:170:967)
at o (D:\Uneeq Demo\examples\web\node\bundle.js:1:327)
at D:\Uneeq Demo\examples\web\node\bundle.js:1:378
at Object.__dirname.1.uneeq-js (D:\Uneeq Demo\examples\web\node\bundle.js:2:22)
at o (D:\Uneeq Demo\examples\web\node\bundle.js:1:327)
But if I use npm start in the terminal, it runs fine and the webpage opens at http://localhost:8080.
Here is the complete directory location:
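For context (main.js itself is not shown in the question), a minimal standalone sketch of the "local server on port 8080 that delivers an index.html file" described in the first paragraph could look like the following. In the actual project the start script delegates the serving to http-server, so this is only an illustration:

// server-sketch.js - hypothetical illustration only, not the asker's actual code
const http = require('http');
const fs = require('fs');
const path = require('path');

http.createServer((req, res) => {
  // Serve index.html for every request in this minimal example
  fs.readFile(path.join(__dirname, 'index.html'), (err, html) => {
    if (err) {
      res.writeHead(500);
      res.end('Failed to read index.html');
      return;
    }
    res.writeHead(200, { 'Content-Type': 'text/html' });
    res.end(html);
  });
}).listen(8080, () => {
  console.log('Listening on http://localhost:8080');
});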
Any help or direction is highly appreciated!

Related

"Uncaught (in promise) ReferenceError: process is not defined" when migrating to Parcel 2

I'm trying to migrate a Node.js webapp from Parcel 1 to Parcel 2.
I have a function in the client-side JavaScript code (that Parcel bundles) that calls another function I'm importing from a utility-functions file in the back-end Node.js code.
All other front-end functions work, and all other Node.js functions that use the Node.js process object still work.
When I trigger a call to this function in the code, I get:
getCloudinaryUrl.js:22 Uncaught (in promise) ReferenceError: process is not defined
Everything worked just fine in Parcel 1, so I'm assuming this is a problem with my Parcel 2 configuration, not with Cloudinary.
The offending lines:
In getCloudinaryUrl.js (back-end):
const { Cloudinary } = require('cloudinary-core');
...
// this is what triggers the error
const cloudName = process.env.CLOUDINARY_CLOUD_NAME;
const cl = new Cloudinary({
  cloud_name: cloudName,
});
In index.js (front-end):
import getCloudinaryUrl from './../../utils/getCloudinaryUrl';
// then I'm calling it later on in the code
In server.js (back-end):
This is the only place in the code where I do dotenv.config:
const dotenv = require('dotenv');
...
dotenv.config({ path: './.env' });
My OLD package.json with Parcel 1 which worked:
...
"scripts": {
  ...
  "watch:js": "parcel watch ./public/js/index.js --public-url /js --out-dir ./public/js --out-file bundle.js",
  "build:js": "parcel build ./public/js/index.js --public-url /js --out-dir ./public/js --out-file bundle.js"
},
"devDependencies": {
  ...
  "parcel-bundler": "1.12.3",
  ...
},
"engines": {
  "node": "^14"
}
My NEW package.json file which doesn't work:
...
"scripts": {
  ...
  "watch:js": "rm -rf .parcel-cache/ && parcel watch ./public/js/index.js --public-url /js --dist-dir ./public/js",
  "build:js": "rm -rf .parcel-cache/ && parcel build ./public/js/index.js --public-url /js --dist-dir ./public/js"
},
"devDependencies": {
  ...
  "parcel": "^2.0.0-nightly.524",
  ...
},
"engines": {
  "node": "^14"
},
"default": "./public/js/bundle.js",
"targets": {
  "main": false,
  "default": {
    "includeNodeModules": true,
    "scopeHoist": false
  }
}
I added rm -rf .parcel-cache/ && since otherwise a second build would always fail.
I read the migration guide and several other pages:
https://v2.parceljs.org/getting-started/migration/
https://v2.parceljs.org/features/module-resolution/
https://v2.parceljs.org/features/node-emulation/
The guide wasn't easy for me to follow, and since Parcel 2 is rather new there aren't many resources about it online. That's how I ended up with the new package.json file above, which gave me the fewest errors (excluding the one above).
If there's anything else I should add to the question, I will gladly provide it.
How do I configure Parcel 2 to detect process in that one file?
It could be as easy as adding CLOUDINARY_CLOUD_NAME=something to a .env file in the project root?
A bit late, but for anyone ending up here, I eventually fixed it by removing the engines key from the package.json.
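For reference, the fix amounts to deleting this block from the new package.json shown above:

"engines": {
  "node": "^14"
}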

Load .env environment variables when running npm task

Let's say we have a .env file with some variables specified:
AWS_PROFILE=hsz
ENVIRONMENT=development
There is also a simple npm task defined:
{
  "name": "project",
  "version": "0.0.1",
  "scripts": {
    "deploy": "sls deploy"
  }
}
But running npm run deploy ignores our .env definition.
It can be resolved with better-npm-run like:
{
  "name": "project",
  "version": "0.0.2",
  "scripts": {
    "deploy": "bnr deploy"
  },
  "betterScripts": {
    "deploy": "sls deploy"
  },
  "devDependencies": {
    "better-npm-run": "^0.1.1"
  }
}
but this looks like an overhead - especially when we have 10+ tasks.
Is there a better way to always load .env without proxying all tasks via better-npm-run?
A bit ugly, but you could try something like this:
"scripts": {
"deploy": "export $(cat .env | xargs) && sls deploy"
}
This will export all environment variables from the .env file before running sls deploy.
There are some variations on this technique in this answer.
Not very clean but it avoids usage of an extra module.
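With the sample .env from the question, the subshell expands that script to roughly the following, so both variables are set in the environment before sls deploy runs:

export AWS_PROFILE=hsz ENVIRONMENT=development && sls deploy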
You can use env-cmd npm package to set environment variables loaded from .env file before executing a npm script.
Add package to your package.json devDependencies:
npm i env-cmd -D
Prefix your npm script with env-cmd program in package.json:
{
  "scripts": {
    "deploy": "env-cmd sls deploy"
  }
}
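By default env-cmd loads ./.env from the project root. As a quick sanity check (the check-env script name here is just an illustration), you could print one of the variables through node -e:

{
  "scripts": {
    "deploy": "env-cmd sls deploy",
    "check-env": "env-cmd node -e \"console.log(process.env.AWS_PROFILE)\""
  }
}

With the sample .env from the question, npm run check-env should print hsz.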
Maintain and load all your environment-specific configuration in the project itself.
dev.js
module.exports = {
  "host": "dev.com"
}
prod.js
module.exports = {
  "host": "prod.com"
}
config.js - the main file that resolves the configuration based on the process.env.ENV variable.
const dev = require('./dev');
const prod = require('./prod');

let envObject = {};
const env = process.env.ENV || "dev";

switch (env) {
  case 'prod':
    envObject = prod;
    break;
  default:
    envObject = dev;
}

envObject['ENV'] = env;
// Optional: merge the values into process.env; otherwise just require('./config') wherever you need the configuration.
process.env = Object.assign(process.env, envObject);

module.exports = envObject;
index.js - the project's root file, which runs every time the project starts
const config = require('./config');
console.log('config object => ',config.host);
package.json
{
  "name": "project",
  "version": "0.0.2",
  "scripts": {
    "deploy": "sls deploy"
  }
}
Running your Node.js code:
Prod environment: ENV=prod npm run deploy
Development environment: npm run deploy
The default environment is set to dev in ./config.js.
Using this simple practice you don't need any npm module to manage your environment configurations.
I was having the same issue while trying to sync the DB with an external command, and I fixed it by requiring the dotenv package, which loads the variables:
"scripts": {
  "db-sync": "node --require dotenv/config ./src/sequelize/sync.js"
}
Then just call npm run db-sync.
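If the .env file is not in the project root, dotenv's preload mode also accepts a path override as a command-line option; the ./config/.env path below is just an example:

"db-sync": "node --require dotenv/config ./src/sequelize/sync.js dotenv_config_path=./config/.env"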

How to set-up my provided code for reactJS development?

I have been provided a code base which has ReactJS included in chunks; it is not a complete ReactJS project. I do not have much experience with webpack, ReactJS, or Node.js. Since there is no "start" command in the "scripts" of package.json, it won't run the project. Upon opening index.html, all I see is the non-React part; the ReactJS components are not showing in the browser. I will share my package.json and webpack.config.js files, please kindly let me know how to run it on a Node server.
package.json:
"main": "webpack.config.js",
"scripts": {
  "build": "webpack && uglifyjs ./assets/build/postadd.js -c -m -o ./assets/build/postadd.min.js"
}
webpack.config.js:
var debug = process.env.NODE_ENV !== "production";
var webpack = require('webpack');

module.exports = {
  context: __dirname,
  devtool: "inline-sourcemap",
  entry: {
    postadd: "./js/postadd/main.js",
    search: "./js/search/main.js"
  },
  output: {
    path: __dirname + "/assets/build/",
    filename: "[name].js"
  }
};
There is no command in scripts other than "build". If you need any more details please let me know, I am stuck.
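For context, given the output section above, npm run build would emit ./assets/build/postadd.js, ./assets/build/postadd.min.js and ./assets/build/search.js; index.html would need to reference those bundles with script tags along these lines (whether it already does is not shown in the question):

<script src="assets/build/postadd.min.js"></script>
<script src="assets/build/search.js"></script>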

Error: Cannot find module 'ipfs' web3.min.js:1:155

I can't work with the Node module ipfs.js.
The console shows the error: "Cannot find module 'ipfs'".
Ubuntu 16.04.4 LTS
node --version == v8.10.0
npm --version == 5.6.0
ipfs version == 0.4.13
My package.json:
{
  "scripts": {
    "dev": "lite-server",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "lite-server": "^2.3.0"
  },
  "dependencies": {
    "ipfs": "^0.28.2",
    "web3": "^0.20.6"
  }
}
My app.js:
const IPFS = require('ipfs')
const node = new IPFS()
// set the provider you want from Web3.providers
web3 = new Web3(new Web3.providers.HttpProvider("http://127.0.0.1:8545"));
When I run it in the command line (Node REPL), it works:
> const IPFS = require('ipfs')
undefined
> const node = new IPFS()
undefined
> Swarm listening on /ip4/127.0.0.1/tcp/4003/ws/ipfs/QmYwqrDJCQEiY2fijnwpPhhsG5w8rVxCTjK7duxtPyt24J
Swarm listening on /ip4/127.0.0.1/tcp/4002/ipfs/QmYwqrDJCQEiY2fijnwpPhhsG5w8rVxCTjK7duxtPyt24J
Swarm listening on /ip4/192.168.2.103/tcp/4002/ipfs/QmYwqrDJCQEiY2fijnwpPhhsG5w8rVxCTjK7duxtPyt24J
I was able to solve my problem.
I had no experience with Node, so I completely confused server-side and client-side usage. My intention was to use IPFS in the browser.
I am creating a Dapp using the Truffle framework, and Truffle provides a web3.min.js file; this library was conflicting with var ipfs = require('ipfs').
The solution was simple: I'm using the js-ipfs library only in the browser:
https://github.com/ipfs/js-ipfs#use-in-the-browser
Now my code is:
My HTML:
<script src="js/web3.min.js"></script>
<script src="https://unpkg.com/ipfs/dist/index.min.js"></script>
<script src="js/myjs.js"></script>
My app.js:
var web3 = new Web3(new Web3.providers.HttpProvider("http://127.0.0.1:8545"));
console.log(web3);
console.log(Ipfs);
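For completeness, the Ipfs global loaded from unpkg can then be instantiated in the browser just like the Node REPL example earlier in the question; the ready and error events below match the js-ipfs API of that era, but the exact API depends on the js-ipfs version being loaded:

// Hypothetical follow-up in myjs.js: start an in-browser IPFS node via the Ipfs global
var node = new Ipfs();
node.on('ready', function () {
  console.log('IPFS node is ready');
});
node.on('error', function (err) {
  console.error('IPFS node failed to start', err);
});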

Nodejs app with npm start script

I'm very new to Node.js.
In my dockerized environment, I want to provide AppDynamics support to Node.js apps. This mandates that every app require the following as the first line of the app:
require("appdynamics").profile({
controllerHostName: '<controller host name>',
controllerPort: <controller port number>,
controllerSslEnabled: false, // Set to true if controllerPort is SSL
accountName: '<AppDynamics_account_name>',
accountAccessKey: '<AppDynamics_account_key>', //required
applicationName: 'your_app_name',
tierName: 'choose_a_tier_name',
nodeName: 'choose_a_node_name',
});
I plan to do that by providing a wrapper called appdynamics.js around the app's entry file. Details:
I run a script in my Node.js Docker image to replace the entry file name in the app's package.json with "appdynamics.js", where appdynamics.js has the AppDynamics-related require statement above.
Ex: { "scripts": { "start": "node server.js" } } will be replaced with
{ "scripts": { "start": "node appdynamics.js" } }
Then I require server.js inside appdynamics.js (a sketch of this wrapper is shown below).
Invoke npm start.
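A minimal sketch of that hypothetical appdynamics.js wrapper, assuming server.js is the app's original entry file (the placeholder values mirror the snippet at the top of the question):

// appdynamics.js - hypothetical wrapper around the app's original entry file
// Initialise the AppDynamics agent first, using the same settings as the
// profile() call shown at the top of the question.
require("appdynamics").profile({
  controllerHostName: '<controller host name>',
  controllerPort: <controller port number>,
  controllerSslEnabled: false,
  accountName: '<AppDynamics_account_name>',
  accountAccessKey: '<AppDynamics_account_key>',
  applicationName: 'your_app_name',
  tierName: 'choose_a_tier_name',
  nodeName: 'choose_a_node_name',
});

// Then load the app's original entry file so it starts as usual.
require('./server.js');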
My only concern is this:
If the package.json had something like "scripts": { "start": "coffee server.coffee" }, my script would replace it with { "start": "coffee appdynamics.js" }, and then my script would invoke npm start, which would error out.
What is the best way to solve this?
This is a follow-up question to Use "coffee" instead of "node" command in production.
Write a wrapper called appdynamics.coffee
Compile this wrapper to .js
Replace server.js with appdynamics.js and server.coffee with appdynamics.coffee
After these operations,
{
  "scripts": {
    "start": "node server.js"
  }
}
will be
{
  "scripts": {
    "start": "node appdynamics.js"
  }
}
and
{
  "scripts": {
    "start": "coffee server.coffee"
  }
}
will be
{
  "scripts": {
    "start": "coffee appdynamics.coffee"
  }
}
