I want to save files temporarily to my heroku file system so that I can send them in an email. As long as the email sends with the attachment, I don't care what happens to the file after that.
var path = require("path");
var temp_dir = path.join(process.cwd(), 'temp/');
if (!fs.existsSync(temp_dir))
{fs.mkdirSync(temp_dir);}
request.get({
url: 'https://docs.google.com/spreadsheets/export?id='+posting.driveID+'&exportFormat=xlsx',
encoding: null, // Force Request to return the data as Buffer
headers: {
Authorization: "Bearer "+access_token
}
}, function done (err, res) {
fs.writeFile('temp/temp.xlsx', res.body, function (err) {
console.log(err);
})
});
var mailOptions={
from : "",
to : "",
subject : "new download",
generateTextFromHTML : true,
html : "<h2>Download "+posting.title+"</h2>",
attachments: [{
filePath: "temp/temp.xlsx"
}]
};
transporter.sendMail(mailOptions, function(error, resp){
if(error) {
}
else{
console.log("Message sent: " + resp.message);
}
transporter.close();
});
I heard that heroku had the ability to host a /temp directory that wipes itself when the dyno recharges. I have tried to use that but have not had any luck. I receive the email with the temp.xlsx document but it is corrupt with no content. Making wonder if nodemailer cannot find the file so it just makes one with that name.
There is no clear documentation on using heroku's file system so I am wondering where my issue is. Otherwise I may just switch over to S3.
Heroku doesn't really do that. There is a way, but it's unreliable.
Heroku instances are designed to be read only. This let's you spin up/down as many as you like to scale.
The Heroku way would be to either:
Store them in memory (if they are only needed for that instance)
Store them in a database (if they are used by all instances)
Note that you can send a stream as an attachment. You don't need to use an actual file.
res.attachment('pdfname.pdf');
someStream.pipe(res);
http://expressjs.com/en/api.html#res.attachment
Related
Okay, I see a few versions of this question out here, but I think mine goes a bit deeper than some of the others and I don't see satisfactory answers.
What I Want:
I have a page on a website that will have its contents frequently changed by users, and I want them to hit a button that says 'Generate PDF', and be able to turn the relevant information on the page into a nice looking PDF report.
I'm trying to do this with NodeJS, Express, with a deployment on Heroku.
Generically, this is how I'm trying to accomplish this:
MY PLAN:
Initiate a GET Request
Query a database for relevant results
Load data into an EJS template
Save the resulting HTML
Use the npm module HTML-pdf to stream it into the browser
Other important details:
This file is meant to be ephemeral. I don't want to save it
anywhere.
I prefer not to even have to write it anywhere, since it's only
meant to exist in the browser at that moment.
This app is deployed on Heroku, and need to work within its
constructs. I prefer not to use a file store like S3.
MY CODE:
router.get('/formulas/:id/pdf', function(req, res){
var db = req.db.collection('users');
var id = new ObjectID(req.params.id);
var pointer = {"formulas.$": 1, "_id": 0};
//query database, get the info out
db.aggregate([
{$match: {"formulas.f_id": id}},
{$unwind: "$formulas"},
{$match: {"formulas.f_id": id}},
{$project : {"formulas": 1, "_id": 0}}
]).toArray(function(e, doc){
if (e) {
throw e;
} else {
var html = null;
ejs.renderFile('./views/pdf.ejs', { //create template from db query
project: doc[0].formulas,
title: 'Formula Info Report',
description: 'PDF Report For Your Formula by Nutraceutical Pro',
ID: 'pdf',
keywords: 'PDF, PDF generator, Formula Info Report',
user: req.user,
loggedIn: req.isAuthenticated()
}, function(err, results){
if (err) {
console.log(err);
}
html = results; //save results of HTML output
});
var options = { format: 'Letter' };
var path = './public/pdf/formula-' + req.params.id + '.pdf';
pdf.create(html, options).toStream(function(err, stream) {//initiate stream
if (err) {
return console.log(err);
}
if (stream) {
console.log(stream);
stream.pipe(fs.createWriteStream(path));
console.log("the pdf was streamed.");
res.end();
}
});
}
});
});
I initiate the GET request with an AJAX call like so:
$('.getPDF').click(function(){
var filepath = 'https://my-domain.herokuapp.com/pdf/formula-' + this.id + '.pdf';
$.ajax({
url: '/formulas/'+ this.id +'/pdf',
type: 'GET',
success: function () {window.open(filepath);}
});
When I run the script I generate a stream, and the console logs that the PDF was created. If I just save the resulting file to my local system it comes out fine, but in my app I get the following error message:
Cannot GET /pdf/formula-59cc38992fb99b00045832ad.pdf
So it's like the raw material for the PDF is being created, but it's not being logged and created in such a way that allows the app to read it.
I'm open to a variety of solutions for this, so if there are much easier ways to do it I'm all ears. Prefer to stick with my current stack though.
I'm developing a node.js application. And I want to use HTTP POST for posting multiple local text files to target web server.
Now I use request npm, so are there any ways to implement my goal with request npm? And of course, I will appreciate other solutions with different libraries.
I meant HTTP POST is executed by node.js itself, not client javascript. With node.js, I want to post multiple local text files to another server.
the most friendly method to send file is proposed by needle
var needle = require('needle');
var data = {
file: '/home/johnlennon/walrus.png',
content_type: 'image/png'
};
needle
.post('https://my.server.com/foo', data, { multipart: true })
.on('readable', function() { /* eat your chunks */ })
.on('end', function() {
console.log('Ready-o, friend-o.');
})
or
needle.post('https://my.server.com/foo', data, {multipart: true},
function(err,result) {
if(err) {
console.log(err);
}
});
Also, i haven't tried, but documentation says that you can pass an array of objects
var data = [
{
file: '/home/johnlennon/walrus1.png',
content_type: 'image/png'
},
{
file: '/home/johnlennon/walrus2.png',
content_type: 'image/png'
}
]
EDITED
I'm trying to structure the files.upload() API provided via Slack but am having a hard time understanding the correct format. At the moment, I am able to use the API to upload a text file but cannot for the life of me figure out how to upload an image.
Here's my issue: I have an image on my development server, let's call it image.png. I want to use the files.upload() API to post that image into a #general Slack channel. Below is the code I have that is successfully generating the image, but currently is just sending the text:
var myBarChart = new Chart(ctx).Bar(barChartData, barChartOptions);
var myBarChartDataURL = leaderboardBarChart.toBase64Image();
canvas.toBuffer(function(err, buf) {
if (err) throw err;
fs.writeFile(__dirname + "/leaderboard.png", buf);
});
bot.api.files.upload({
token: process.env.token,
title: "Image",
filename: "image.png",
filetype: "auto",
//content: "Posted with files.upload API",
file: fs.createReadStream("path/to/image_file.png"),
channels: filtered[0].id
}, function(err, response) {
if (err) {
console.log("Error (files.upload) " + err);
} else {
console.log("Success (files.upload) " + response);
};
});
When I run the code I get one of the following error:
"invalid_array_arg" which Slack details as: "The method was passed a PHP-style array argument (e.g. with a name like foo[7]). These are never valid with the Slack API."
I'm not entirely sure what to make of this error as I'm not using PHP nor anything that I can identify that would be PHP-like.
I've experimented with several different approaches for including the file path, whether using the 'fs' module, storing it in a variable, or just referencing it's absolute path (and even a relative path). I'm a bit lost and am just looking for some guidance.
I understand that this particular API uses multipart/form-data but I don't have a form. This app is strictly a NodeJS app. There is no framework (like Express) working in tandem with the main node script.
Any and all help is really appreciated. Again, just looking for some insight/guidance on what I'm missing or doing wrong.
Thanks in advance!
It looks like you'll have to go outside of Botkit's API here, since Botkit doesn't seem to support sending multipart/form-data.
Give this a try, using request directly (already in use by Botkit itself):
var request = require('request');
...
request.post({
url: 'https://slack.com/api/files.upload',
formData: {
token: bot.config.token,
title: "Image",
filename: "image.png",
filetype: "auto",
channels: filtered[0].id,
file: fs.createReadStream('test.png'),
},
}, function (err, response) {
console.log(JSON.parse(response.body));
});
I recommend you using the nodejslack.
It uses the Promises pattern, powered by Bluebird .
There is a sample code for uploading file in its documentations, here it is:
var Slack = require('nodejslack');
var fs = require('fs');
var SLACK_TOKEN = process.env.SLACK_TOKEN || 'YOUR_GENERATED_SLACK_TOKEN';
var slack = new Slack(SLACK_TOKEN);
var form = {
file: fs.createReadStream('test.csv'), // Optional, via multipart/form-data. If omitting this parameter, you MUST submit content
// content: 'Your text here', // Optional, File contents. If omitting this parameter, you must provide a `file`
filename: 'test.csv', // Required
fileType: 'post', // Optional, See more file types in https://api.slack.com/types/file#file_types
title: 'Title of your file!', // Optional
initial_comment: 'First comment about this file.', // Optional
channels: 'general' //Optional, If you want to put more than one channel, separate using comma, example: 'general,random'
};
slack.fileUpload(form)
.then(function(response){
// Slack sends a json with a boolean var ok.
// Error example : data = { ok: false, error: 'user_not_found' }
// Error example : data = { ok: true, file: 'user_not_found' }
if(!response || !response.ok){
return Promise.reject(new Error('Something wrong happened during the upload.'));
}
console.log('Uploaded Successfully:',response);
return Promise.resolve(response);
})
.catch(function(err){
return err;
});
I've developed a nodeJS API (using express) which allow users to login and get a list of files that they have stored in a remote server. And as you understand, the code must be non-blocking so the webserver can still responds to logging in requests, even if there are some users fetching theirs files lists.
Every time a user make a request to get his files list, the listOfFiles function is called.
This is the code:
exports.listOfFiles = function(req,res){
db.Account.find({where: {id:1}}).then(function(newAcc){
console.log("encontrou a account");
getFiles('/', newAcc.accessToken, '0', newAcc, function(error){
if (error) {
log.error('Error getting files');
}else{
console.log("callback!")
}
});
});
}
getFiles function: this function is responsible for fetching the file list from the remote server, and store them in a postgres database
function getFiles(path, accessToken, parentID, newAcc, callback){
var client = new ExternalAPI.Client({
key: config.get("default:clientId"),
secret: config.get("default:clientSecret")
});
client._oauth._token = accessToken;
var options = {
removed : false,
deleted : false,
readDir: true
}
//this is the instruction that fetch an array of items
//(metadata only) from a remote server
client.stat(path, options, function(error, entries) {
if (error) {
if (error.status == 429) {
console.log(accessToken + 'timeout')
setTimeout(
getFiles(path, accessToken, parentID, callback),
60000);
}else{
log.error(error);
callback(error,null);
}
}
else {
//When the array os items arrives:
console.log("RECEIVED FILES")
var inserted = 0;
var items = entries._json.contents;
for(var file in items){
var skyItemID = uuid.v1();
var name = items[file].path.split('/').pop();
var itemType;
if (items[file].is_dir) {
itemType = 'folder';
}else{
itemType = 'file';
}
newAcc.createItem({
name : name,
lastModified: items[file].modified,
skyItemID: skyItemID,
parentID: parentID,
itemSize: items[file].bytes,
itemType : itemType,
readOnly: items[file].read_only,
mimeType: items[file].mime_type
}).then(function(item){
console.log(item.name)
if (++inserted == items.length) {
console.log(inserted)
console.log(items.length)
console.log("callsback")
callback();
}
}).catch(function(error){
log.error('[DROPBOX] - Filename with special characters');
callback(new Error);
return;
});
}
}
});
}
The problem here is, the moment that webserver prints console.log("RECEIVED FILES") in our console, it stops responding to all other requests, such as log in or fetch files requests from other users.
And it starts responding again when it prints console.log("callback!"). So, i'm assuming that somehow nodeJS is blocking itself until getFiles function is finished and called back.
I think that this is not a normal behaviour. Shouldn't nodeJS be responding to responds to other requests even if there are some operations running in background? Shouldn't getFiles function being run in background and not affecting/blocking all other requests? What am I doing wrong?
Thanks!
I am facing the same kind of problem for long time server http request blocks the service for response other client requests. This is my topic. What is the correct behavior for Node.js while Node.js is requesting a long time http request for other servers Currently, I got no answer for that. If you got the answer, please reply me. Thanks.
I wrote a node.js app that displays in real time pictures from Instagram with a specific hashtag. The app works very well and is very quick. What i would like to add, is the possibility to save all displayed images into a specific folder (on my computer) for printing (then adding a script to do that).
Do you know if there is a library that would allow me to save those images into a folder ? Thank you
The fact that it is an Instagram photo and you are getting it in a real-time is not really important in saving the photo on your local hard drive.
Regardless of you using any npm module or simply using your own code some where at the end you should have a URL to the photo like this:
"standard_resolution": {
"url": "http://distillery.s3.amazonaws.com/media/2011/02/01/34d027f155204a1f98dde38649a752ad_7.jpg",
"width": 612,
"height": 612
}
So you can simply use:
var http = require('http-get');
var options = {url: 'http://distillery.s3.amazonaws.com/media/2011/02/01/34d027f155204a1f98dde38649a752ad_7.jpg'};
http.get(options, '/path/to/where/you/want/myPic1.jpg', function (error, result) {
if (error) {
console.error(error);
} else {
console.log('Photo successfully downloaded and saved at: ' + result.file);
}
});
or I have seen this too (I can't remember where but I have it as snippet):
var http = require('http')
, fs = require('fs')
, options
options = {
host: 'www.google.com'
, port: 80
, path: '/images/logos/ps_logo2.png'
}
var request = http.get(options, function(res){
var imagedata = ''
res.setEncoding('binary')
res.on('data', function(chunk){
imagedata += chunk
})
res.on('end', function(){
fs.writeFile('logo.png', imagedata, 'binary', function(err){
if (err) throw err
console.log('File saved.')
})
})
})
Since it is an http request the respond time depends on so many factors (from the speed of your internet, Instagram server respond time, all the way to your hard drive speed).
So if you can make a queue with Redis or other message queue tools you can make sure all the photos eventually will be saved locally if your app is running 24x7 with lots of subscriptions.
Good luck!
Update: I found the URL to my code snippets: Writing image to local server
Appreciate these guys for their codes! I just pointed out.