Webshot fails on Meteor in DigitalOcean Ubuntu 14.04 - node.js

I am using this code to generate pdf:
// Build the absolute output path for the generated PDF.
let fileUri = process.env.PWD + '/storage/orders-pdf/' + fileName;
// Commence Webshot
webshot(html_string, fileUri, options, function(error) {
// NOTE(review): `error` from webshot is never checked — if PhantomJS fails,
// the readFile below runs against a file that was never created (ENOENT).
fs.readFile(fileUri, function (err, data) {
if (err) {
return console.log(err);
}
// Delete the temp file once its contents are in memory.
fs.unlinkSync(fileUri);
// Resolve the waiting future with the PDF buffer (Meteor/Fibers pattern).
fut.return(data);
});
});
// Block this Fiber until the callback above resolves the future.
let pdfData = fut.wait();
But it throws the following error:
{ [Error: ENOENT, open '/opt/holi/storage/orders-pdf/Attributes.pdf']
errno: 34,
code: 'ENOENT',
path: '/opt/holi/storage/orders-pdf/Attributes.pdf' }
Tried to use npm package https://github.com/brenden/node-webshot
The code works perfectly on localhost, but fails on the server with the error shown above.
EDIT:
Even when running webshot without:
// The readFile/unlink portion that was removed for this test run:
fs.readFile(fileUri, function (err, data) {
if (err) {
return console.log(err);
}
fs.unlinkSync(fileUri);
fut.return(data);
});
The file is not created.
EDIT-2:
Webshot throws an error: [Error: PhantomJS exited with return value 2]
EDIT-3:
Actual issue: https://github.com/brenden/node-webshot/issues/123

I had a similar problem, and spent most of the day trying to figure out the issue. I ended up adding:
"phantomPath": "/usr/bin/phantomjs"
to my webshot options object. The phantom path I used is where mup installs phantomjs on your server setup.

Related

writeFile does not create file

Error code looks like:
{ Error: ENOENT: no such file or directory, open 'sad' errno: -2, code: 'ENOENT', syscall: 'open', path: 'sad' }
where 'sad' is the name of file I would like to write to and it doesn't exist.
Code looks like this:
// Write JSON_string to `filename`; flag 'w' creates the file if it is missing.
fs.writeFile(filename, JSON_string, { flag: 'w' }, function(err){
if(err){
return console.error(err);
}
// NOTE(review): this return value is discarded — a writeFile callback's
// result is never consumed by fs.
return JSON_string;
});
There are other similar questions, but they all involve mistakes in the path (starting or not starting with /). I simply want to write a file in the directory from which I run this Node.js application (the same directory where it was initialized with npm).
Running with
sudo node server4.js
Doesn't work either.
Changing flags to w+ or wx or whatever, doesn't help.
Code works if file exists.
Node v9+.
I need to use writeFile() function.
This works for me; please check whether it also works on your system:
// Minimal round-trip check: write a file, then read it back to verify.
var fs = require('fs')
fs.writeFile('./myfile.txt', 'Content to write', { flag: 'w' }, function(err) {
if (err)
return console.error(err);
// Read the same file back to confirm the write actually landed on disk.
fs.readFile('./myfile.txt', 'utf-8', function (err, data) {
if (err)
return console.error(err);
console.log(data);
});
});
(besides writing it also reads to confirm)

How do you run RServe on AWS Lambda with NodeJS?

While this question is quite open-ended, I'm generally trying to follow this excellent post here: https://aws.amazon.com/blogs/compute/analyzing-genomics-data-at-scale-using-r-aws-lambda-and-amazon-api-gateway/ which describes setting up R to run with python. I, on the other hand, am trying to get R to work with NodeJs.
I've packaged up my dependencies, deployed to Lambda, and can run simple Node scripts. However, I am having difficulty connecting to RServe from Node using the npm package Rio (https://www.npmjs.com/package/rio). RServe, on both my localhost and on Heroku, will accept the default connection of 127.0.0.1 and port 6331. No luck with AWS Lambda.
'use strict';
var rio = require('rio');
var Promise = require('bluebird');
var exec = require('child_process').exec;
// Promise that resolves once RServe (started via Rscript) accepts connections.
// NOTE(review): created at module load, so on a warm Lambda container this
// only runs once — presumably intended; verify against Lambda lifecycle.
var whenReady = new Promise(function(resolve){
// require libraries and bootup RServe
exec('Rscript init.R', function(error, stdout, stderr) {
(function check() {
// Attempt to connect to RServe through Rio using my 'up' test function
rio.e({
entrypoint: 'up',
callback: function (err) {
console.log(err);
// Connection refused: RServe not up yet — poll again in 100 ms.
if (err) return setTimeout(check, 100);
// If no connection error, rserve is running
console.log("Rserve up");
resolve();
}
});
})();
});
});
// Lambda entry point: waits for RServe, then invokes the 'hello' R function.
exports.handler = function(event, context, callback) {
whenReady.then(function () {
// Call hello world
rio.e({
entrypoint: 'hello',
data: {name:'Will'},
callback: function(err, result){
console.log("Error", err);
callback(null, result);
}
});
});
};
This ends with connection refused errors
2017-03-01T22:58:33.210Z 96f69baf-fed2-11e6-9164-e91b9773d645 {
[Error: connect ECONNREFUSED 127.0.0.1:6311] code: 'ECONNREFUSED',
errno: 'ECONNREFUSED', syscall: 'connect', address: '127.0.0.1',
port: 6311 }
Any ideas on how to fix this one? I'm hoping we don't need to get complicated: https://aws.amazon.com/blogs/aws/new-access-resources-in-a-vpc-from-your-lambda-functions/
Thank you in advance!
** Update **
init.R does the following
// Require some libraries
...
require('jsonlite');
# Health-check entrypoint: returns JSON `true` so the Node side can poll it.
up <- function () {
toJSON(TRUE)
}
# Start Rserve so clients (Rio) can connect.
run.Rserve()
** Last Update **
Gave up and went to the python example as posted in the first link.
Will

Error downloading file from my Node.js server

I'm trying to make a download link on my server for a zip file and I'm currently getting this error: (Note, still just testing it on local machine)
{ [Error: ENOENT: no such file or directory, stat 'C:\Users\Jordan\Desktop\Websites\HappyCamel\Users\Jordan\Desktop\Websites\HappyCamel']
errno: -4058,
code: 'ENOENT',
syscall: 'stat',
path: 'C:\\Users\\Jordan\\Desktop\\Websites\\HappyCamel\\Users\\Jordan\\Desktop\\Websites\\HappyCamel',
expose: false,
statusCode: 404,
status: 404 }
The relevant code is this:
// GET /file/:name — attempt to send test123.zip as a download.
router.get('/file/:name', function(req, res, next) {
console.log('test123'); //successfully prints to console
// NOTE(review): res.download's first argument must be a path to a FILE;
// here it is a directory, and a relative path is resolved against the
// process working directory — the source of the ENOENT stat error above.
res.download('Users/Jordan/Desktop/Websites/HappyCamel/', 'test123.zip', function(err) {
console.log('test456'); //successfully prints to console
if(err) {
console.log(err) //I'm assuming this is source of logged error
} else {
console.log("no error"); //doesn't print
}
});
})
edit:
Fixed it with changing this line:
res.download('Users/Jordan/Desktop/Websites/HappyCamel/', 'test123.zip', function(err) {
to
res.download('./test123.zip', 'test123.zip', function(err) {
but now I get
angular.min.js:114 ReferenceError: success is not defined
error on my browser, but no errors in my node console (my "no error" line is printing)
you are using relative path. when you do this:
res.download('Users/Jordan/Desktop/Websites/HappyCamel/', 'test123.zip', function(err) {
it will look for Users/Jordan/Desktop/Websites/HappyCamel/ relative to the process's working directory. It looks like what you need is a full path — or better, a correct relative path. From the error, it appears the file is located alongside your code, so this should do:
res.download('./', 'test123.zip', function(err) {

Mocking file system contents not working with gulp.src

I'm trying to use mock-fs to mock up file system contents to test gulp tasks. Unfortunately, gulp.src doesn't seem to play well with mock-fs. Specifically, I get ENOENT errors:
Message:
ENOENT, lstat '/vagrant/study-node-heroku/instances/development/app.json'
Details:
errno: -2
code: ENOENT
path: /vagrant/study-node-heroku/instances/development/app.json
domainEmitter: [object Object]
domain: [object Object]
domainThrown: false
Stack:
Error: ENOENT, lstat '/vagrant/study-node-heroku/instances/development/app.json'
at Error (native)
Other parts of my code and test code access the mock-fs-created files just fine.
What am I doing wrong? I suspect that the problem is related to gulp's usage of vinyl.
Here is the function under test:
// Build a gzipped tarball of an instance directory, then call
// done(err, tarballPath). Defaults: instance 'development', tarball named
// after the instance.
var herokuTarball = function(options, done) {
var instance = options.instance || 'development';
var tarballName = options.tarballName || instance
var tarballPath = path.join(config.temp, tarballName + '.tar.gz');
// Glob matching every file under the instance directory.
var files = path.join(config.instances, instance, '**/*');
// Sanity check: app.json must exist in the instance directory before packing.
yassert.file(path.join(config.instances, instance, 'app.json'));
async.waterfall([
function(cb) {
// Remove any stale tarball first.
del([tarballPath], cb);
},
// NOTE(review): `err` here is del's result passed down the waterfall,
// not an error — real errors skip straight to the final callback.
function(err, cb) {
gulp.src(files)
.pipe(tar(tarballName + '.tar'))
.pipe(gzip())
.pipe(gulp.dest(config.temp))
.pipe(gcallback(cb));
}
], function(err, result) {
if (err) return err;
return done(err, tarballPath);
});
}
And here is the test snippet:
describe('gulp heroku:tarball', function() {
// Restore the real file system after the suite finishes.
after('something', function() {
mock.restore();
});
// Replace the file system with an in-memory mock before the suite runs.
before('something', function() {
mock({
'instances/development': {
'app.json': 'test content'
}
});
});
it('creates a tarball', function(done) {
var options = {}
heroku.herokuTarball(options, function(err, result) {
expect(result).to.be.a('string');
// The tarball should exist on the (mocked) file system.
yassert.file(result);
done();
});
});
});
Notice that the yassert (yeoman-assert) calls pass fine -- the file is there. If I take the function with the gulp.src call out of the async waterfall, the error goes away (and the test fails of course).
Issue posted at https://github.com/tschaub/mock-fs/issues/44
You are not doing anything wrong, mock-fs README states:
Note mock-fs is not compatible with graceful-fs#3.x but works with graceful-fs#4.x.
Looking at the dependencies of gulp we get:
$ npm info gulp devDependencies.graceful-fs
^3.0.0
Hence, gulp is still dependent on graceful-fs#3.x, therefore mock-fs will not work.
YMMV, but maybe vinyl-fs-mock is an alternative?

Unable to upload .torrent with node.js and utorrent-api

I'm trying to use utorrent-api library for node.js as in example:
// Download the .torrent into a Buffer ('encoding': null keeps it binary),
// then upload it to uTorrent via the WebUI 'add-file' action.
request({'uri' : 'http://releases.ubuntu.com/13.04/ubuntu-13.04-desktop-i386.iso.torrent', 'encoding': null}, function (error, response, torrentFileBuffer) {
utorrent.call('add-file', {'torrent_file': torrentFileBuffer}, function(err, data) {
if(err) { console.log('error : '); console.log(err); return; }
console.log('Successfully added torrent file !');
console.log(data);
});
});
and I'm getting this error in console:
error :
{ [Error: read ECONNRESET] code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'read' }
I can connect to uTorrent with:
// Listing torrents succeeds, so the WebUI connection itself works —
// only the file upload path fails.
utorrent.call('list', function(err, torrents_list) {
console.log(torrents_list);
});
And I'm getting torrents list correctly.
I tried to save .torrent file to disk and it looks ok, so the problem is with file upload.
uTorrent 3.3 is running on Linux. WebUI is working and I can upload .torrent files through browser.
How can I debug this error?

Resources