I'm developing a Node.js application, and I want to use HTTP POST to send multiple local text files to a target web server.
I currently use the request npm package, so is there a way to implement this with request? Of course, I'd also appreciate solutions that use other libraries.
To be clear, the HTTP POST is executed by Node.js itself, not by client-side JavaScript. From Node.js, I want to post multiple local text files to another server.
The most user-friendly way to send a file is with needle:
var needle = require('needle');
var data = {
file: '/home/johnlennon/walrus.png',
content_type: 'image/png'
};
needle
.post('https://my.server.com/foo', data, { multipart: true })
.on('readable', function() { /* eat your chunks */ })
.on('end', function() {
console.log('Ready-o, friend-o.');
})
or
needle.post('https://my.server.com/foo', data, {multipart: true},
function(err,result) {
if(err) {
console.log(err);
}
});
Also, I haven't tried it, but the documentation says that you can pass an array of objects:
var data = [
{
file: '/home/johnlennon/walrus1.png',
content_type: 'image/png'
},
{
file: '/home/johnlennon/walrus2.png',
content_type: 'image/png'
}
]
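I haven't tried the array form myself, so if it doesn't work for you, an alternative that matches the original question (multiple local text files) is to give each file its own field name. A rough, untested sketch, with placeholder paths, field names and URL:

var needle = require('needle');

// Each key becomes its own multipart field; needle reads each file from disk.
var data = {
  first: { file: '/path/to/first.txt', content_type: 'text/plain' },
  second: { file: '/path/to/second.txt', content_type: 'text/plain' }
};

needle.post('https://target.server.com/upload', data, { multipart: true },
  function(err, response) {
    if (err) {
      return console.log(err);
    }
    console.log('Posted both files, status ' + response.statusCode);
  });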
Is it possible to post an image on Twitter using Node.js and Puppeteer? I find it hard because when you click the image button on Twitter, it opens your local file picker.
It's definitely possible to post an image on Twitter automatically using Node.js. The Twitter API client for Node called 'twit' is well suited for this. You can find the package on GitHub or npm.
var Twit = require('twit')
var fs = require('fs')

// Assumed setup: fill in your own app credentials here
var T = new Twit({
consumer_key: '...',
consumer_secret: '...',
access_token: '...',
access_token_secret: '...'
})

// post a tweet with media
var b64content = fs.readFileSync('/path/to/img', { encoding: 'base64' })
// first we must post the media to Twitter
T.post('media/upload', { media_data: b64content }, function (err, data, response) {
// now we can assign alt text to the media, for use by screen readers and
// other text-based presentations and interpreters
var mediaIdStr = data.media_id_string
var altText = "Small flowers in a planter on a sunny balcony, blossoming."
var meta_params = { media_id: mediaIdStr, alt_text: { text: altText } }
T.post('media/metadata/create', meta_params, function (err, data, response) {
if (!err) {
// now we can reference the media and post a tweet (media will attach to the tweet)
var params = { status: 'loving life #nofilter', media_ids: [mediaIdStr] }
T.post('statuses/update', params, function (err, data, response) {
console.log(data)
})
}
})
})
// post media via the chunked media upload API.
// You can then use POST statuses/update to post a tweet with the media attached as in the example above using `media_id_string`.
// Note: You can also do this yourself manually using T.post() calls if you want more fine-grained
// control over the streaming. Example: https://github.com/ttezel/twit/blob/master/tests/rest_chunked_upload.js#L20
//
var filePath = '/absolute/path/to/file.mp4'
T.postMediaChunked({ file_path: filePath }, function (err, data, response) {
console.log(data)
})
It appears that the current mysql driver for Deno does not yet support password authentication. I have just finished an API in PHP and would like to see an example of the same in Deno.
This is about all you get as an example from their site:
import { serve } from "https://deno.land/std@0.58.0/http/server.ts";
const s = serve({ port: 8000 });
console.log("http://localhost:8000/");
for await (const req of s) {
req.respond({ body: "Hello World\n" });
}
Where do I add JSON headers on this?
Is the router native or does it have to be something called OAK?
Is it possible you could add static GET, POST, PUT, and DELETE handlers to this example, returning the contents of post.json, get.json, put.json, and delete.json at each respective endpoint?
I'm just having a hard time finding examples.
Is the router native or does it have to be something called OAK?
No, there's no built-in router. You can use Oak or another HTTP framework.
To return a file, you use Deno.open, which returns a Reader, and you can pass that Reader to the body property of req.respond, which accepts a Reader, string, or Uint8Array.
The following example reads the file {HTTP_METHOD}.json and returns its contents, setting the Content-Type header to application/json.
import { serve } from "https://deno.land/std@0.58.0/http/server.ts";
const s = serve({ port: 8000 });
console.log("http://localhost:8000/");
async function handleRequest(req) {
try {
const headers = new Headers({ 'Content-Type': 'application/json' });
const file = await Deno.open(`./${req.method.toLowerCase()}.json`);
await req.respond({ body: file, headers })
} catch(e) {
console.error(e);
req.respond({ body: 'Internal Server Error', status: 500 });
}
}
for await (const req of s) {
handleRequest(req);
}
The std HTTP server is a bit low level; you probably want to use a framework.
https://github.com/oakserver/oak
https://drash.land/docs/#/
Frameworks have plenty of examples.
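For example, a rough Oak sketch of the static JSON endpoints you describe (untested; the pinned Oak version is just an example, and the get.json/post.json files are assumed to sit next to the script):

// run with: deno run --allow-net --allow-read server.js
import { Application, Router } from "https://deno.land/x/oak@v6.0.1/mod.ts";

const router = new Router();

// Each route returns the contents of the matching JSON file.
router.get("/", async (ctx) => {
  ctx.response.headers.set("Content-Type", "application/json");
  ctx.response.body = await Deno.readFile("./get.json");
});
router.post("/", async (ctx) => {
  ctx.response.headers.set("Content-Type", "application/json");
  ctx.response.body = await Deno.readFile("./post.json");
});

const app = new Application();
app.use(router.routes());
app.use(router.allowedMethods());

console.log("http://localhost:8000/");
await app.listen({ port: 8000 });

PUT and DELETE routes work the same way with router.put and router.delete.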
I'm new to Node.js. What I'm trying to do is stream the upload of a file from the web browser to cloud storage through my Node.js server.
I'm using the 'express', 'request' and 'busboy' modules.
var express = require("express");
var request = require("request");
var BusBoy = require("busboy");
var router = express.Router();
router.post("/upload", function(req, res, next) {
var busboy = new BusBoy({ headers: req.headers });
var json = {};
busboy.on("file", function (fieldname, file, filename, encoding, mimetype) {
file.on("data", function(data) {
console.log(`streamed ${data.length}`);
});
file.on("end", function() {
console.log(`finished streaming ${filename}`);
});
var r = request({
url: "http://<my_cloud_storage_api_url>",
method: "POST",
headers: {
"CUSTOM-HEADER": "Hello",
},
formData: {
"upload": file
}
}, function(err, httpResponse, body) {
console.log("uploaded");
json.response = body;
});
});
busboy.on("field", function(name, val) {
console.log(`name: ${name}, value: ${val}`);
});
busboy.on("finish", function() {
res.send(json);
});
req.pipe(busboy);
});
module.exports = router;
But I keep getting the following error on the server. What am I doing wrong here? Any help is appreciated.
Error: Part terminated early due to unexpected end of multipart data
at node_modules\busboy\node_modules\dicer\lib\Dicer.js:65:36
at nextTickCallbackWith0Args (node.js:420:9)
at process._tickCallback (node.js:349:13)
I realize this question is some 7 months old, but I shall answer it here in an attempt to help anyone else currently banging their head against this.
You have two options, really: Add the file size, or use something other than Request.
Note: I edited this shortly after first posting it to hopefully provide a bit more context.
Using Something Else
There are some alternatives you can use instead of Request if you don't need all the baked-in features it has.
form-data can be used by itself in simple cases, or together with, say, got; request uses it internally. (See the sketch after this list.)
bhttp advertises Streams2+ support, although in my experience that hasn't been an issue. It has no built-in https support; you have to specify a custom agent.
got is another slimmed-down one. It doesn't have any special handling of form data like request does, but it's trivially used with form-data or form-data2. I had trouble getting it working over a corporate proxy, though that's likely because I'm a networking newb.
needle seems pretty lightweight, but I haven't actually tried it.
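To make the "something else" option concrete, here is a rough, untested sketch of your route using form-data directly on top of Node's http module. The host, path and field name are placeholders, and I've dropped the custom header and field handling to keep it short. Because no Content-Length is set, Node sends the body with chunked transfer encoding, which is exactly what Request can't do:

var express = require("express");
var http = require("http");
var FormData = require("form-data");
var BusBoy = require("busboy");
var router = express.Router();

router.post("/upload", function(req, res, next) {
  var busboy = new BusBoy({ headers: req.headers });
  busboy.on("file", function(fieldname, file, filename, encoding, mimetype) {
    var form = new FormData();
    // Stream the incoming file straight into the outgoing multipart body.
    form.append("upload", file, { filename: filename, contentType: mimetype });
    var upload = http.request({
      method: "POST",
      host: "my-cloud-storage-host", // placeholder
      path: "/upload",               // placeholder
      headers: form.getHeaders()     // multipart/form-data plus the boundary
    });
    form.pipe(upload);
    upload.on("response", function(response) {
      response.resume(); // drain the storage API's reply
      res.send({ status: response.statusCode });
    });
    upload.on("error", function(err) {
      res.status(500).send(err.message);
    });
  });
  req.pipe(busboy);
});

module.exports = router;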
Using Request: Add the File Size
Request does not (as of writing) have any support for transfer-encoding: chunked, so to upload files with it you need to supply the file's size along with the file. If you're uploading from a web client, that means the client needs to send the file size to your server in addition to the file itself.
The way I came up with to do this is to send the file metadata in its own field before the file field.
I modified your example with comments describing what I did. Note that I did not include any validation of the data received, but I recommend you add that.
var express = require("express");
var request = require("request");
var BusBoy = require("busboy");
var router = express.Router();
router.post("/upload", function(req, res, next) {
var busboy = new BusBoy({ headers: req.headers });
var json = {};
// Use this to cache any fields which are file metadata.
var fileMetas = {};
busboy.on("file", function (fieldname, file, filename, encoding, mimetype) {
// Be sure to match this prop name here with the pattern you use to detect meta fields.
var meta = fileMetas[fieldname + '.meta'];
if (!meta) {
// Make sure to dump the file.
file.resume();
// Then, do some sort of error handling here, because you cannot upload a file
// without knowing its length.
return;
}
file.on("data", function(data) {
console.log(`streamed ${data.length}`);
});
file.on("end", function() {
console.log(`finished streaming ${filename}`);
});
var r = request({
url: "http://<my_cloud_storage_api_url>",
method: "POST",
headers: {
"CUSTOM-HEADER": "Hello",
},
formData: {
// value + options form of a formData field.
"upload": {
value: file,
options: {
filename: meta.name,
knownLength: meta.size
}
}
}
}, function(err, httpResponse, body) {
console.log("uploaded");
json.response = body;
});
});
busboy.on("field", function(name, val) {
// Use whatever pattern you want. I used (fileFieldName + ".meta").
// Another good one might be ("meta:" + fileFieldName).
if (/\.meta$/.test(name)) {
// I send an object with { name, size, type, lastModified },
// which are just the public props pulled off a File object.
// Note: Should probably add error handling if val is somehow not parsable.
fileMetas[name] = JSON.parse(val);
console.log(`file metadata: name: ${name}, value: ${val}`);
return;
}
// Otherwise, process field as normal.
console.log(`name: ${name}, value: ${val}`);
});
busboy.on("finish", function() {
res.send(json);
});
req.pipe(busboy);
});
module.exports = router;
On the client, you need to then send the metadata on the so-named field before the file itself. This can be done by ordering an <input type="hidden"> control before the file and updating its value onchange. The order of values sent is guaranteed to follow the order of inputs in appearance. If you're building the request body yourself using FormData, you can do this by appending the appropriate metadata before appending the File.
Example with <form>
<script>
function extractFileMeta(file) {
return JSON.stringify({
size: file.size,
name: file.name,
type: file.type,
lastModified: file.lastModified
});
}
function onFileUploadChange(event) {
// change this to use arrays if using the multiple attribute on the file input.
var file = event.target.files[0];
var fileMetaInput = document.querySelector('input[name="fileUpload.meta"]');
if (fileMetaInput) {
fileMetaInput.value = extractFileMeta(file);
}
}
</script>
<form action="/upload-to-cloud" method="post" enctype="multipart/form-data">
<input type="hidden" name="fileUpload.meta">
<input type="file" name="fileUpload" onchange="onFileUploadChange(event)">
</form>
Example with FormData:
function onSubmit(event) {
event.preventDefault();
var form = document.getElementById('my-upload-form');
var formData = new FormData();
var fileUpload = form.elements['fileUpload'].files[0];
var fileUploadMeta = JSON.stringify({
size: fileUpload.size,
name: fileUpload.name,
type: fileUpload.type,
lastModified: fileUpload.lastModified
});
// Append fileUploadMeta BEFORE fileUpload.
formData.append('fileUpload.meta', fileUploadMeta);
formData.append('fileUpload', fileUpload);
// Do whatever you do to POST here.
}
I'm trying to structure the files.upload() API provided via Slack but am having a hard time understanding the correct format. At the moment, I am able to use the API to upload a text file but cannot for the life of me figure out how to upload an image.
Here's my issue: I have an image on my development server, let's call it image.png. I want to use the files.upload() API to post that image into a #general Slack channel. Below is the code I have that is successfully generating the image, but currently is just sending the text:
var myBarChart = new Chart(ctx).Bar(barChartData, barChartOptions);
var myBarChartDataURL = myBarChart.toBase64Image();
canvas.toBuffer(function(err, buf) {
if (err) throw err;
fs.writeFile(__dirname + "/leaderboard.png", buf);
});
bot.api.files.upload({
token: process.env.token,
title: "Image",
filename: "image.png",
filetype: "auto",
//content: "Posted with files.upload API",
file: fs.createReadStream("path/to/image_file.png"),
channels: filtered[0].id
}, function(err, response) {
if (err) {
console.log("Error (files.upload) " + err);
} else {
console.log("Success (files.upload) " + response);
};
});
When I run the code, I get the following error:
"invalid_array_arg" which Slack details as: "The method was passed a PHP-style array argument (e.g. with a name like foo[7]). These are never valid with the Slack API."
I'm not entirely sure what to make of this error as I'm not using PHP nor anything that I can identify that would be PHP-like.
I've experimented with several different approaches for including the file path, whether using the 'fs' module, storing it in a variable, or just referencing its absolute path (and even a relative path). I'm a bit lost and am just looking for some guidance.
I understand that this particular API uses multipart/form-data but I don't have a form. This app is strictly a NodeJS app. There is no framework (like Express) working in tandem with the main node script.
Any and all help is really appreciated. Again, just looking for some insight/guidance on what I'm missing or doing wrong.
Thanks in advance!
It looks like you'll have to go outside of Botkit's API here, since Botkit doesn't seem to support sending multipart/form-data.
Give this a try, using request directly (already in use by Botkit itself):
var request = require('request');
...
request.post({
url: 'https://slack.com/api/files.upload',
formData: {
token: bot.config.token,
title: "Image",
filename: "image.png",
filetype: "auto",
channels: filtered[0].id,
file: fs.createReadStream('test.png'),
},
}, function (err, response) {
console.log(JSON.parse(response.body));
});
I recommend using nodejslack.
It uses the Promises pattern, powered by Bluebird.
There is sample code for uploading a file in its documentation; here it is:
var Slack = require('nodejslack');
var fs = require('fs');
var SLACK_TOKEN = process.env.SLACK_TOKEN || 'YOUR_GENERATED_SLACK_TOKEN';
var slack = new Slack(SLACK_TOKEN);
var form = {
file: fs.createReadStream('test.csv'), // Optional, via multipart/form-data. If omitting this parameter, you MUST submit content
// content: 'Your text here', // Optional, File contents. If omitting this parameter, you must provide a `file`
filename: 'test.csv', // Required
fileType: 'post', // Optional, See more file types in https://api.slack.com/types/file#file_types
title: 'Title of your file!', // Optional
initial_comment: 'First comment about this file.', // Optional
channels: 'general' //Optional, If you want to put more than one channel, separate using comma, example: 'general,random'
};
slack.fileUpload(form)
.then(function(response){
// Slack sends a json with a boolean var ok.
// Error example : data = { ok: false, error: 'user_not_found' }
// Success example : data = { ok: true, file: { ... } }
if(!response || !response.ok){
return Promise.reject(new Error('Something wrong happened during the upload.'));
}
console.log('Uploaded Successfully:',response);
return Promise.resolve(response);
})
.catch(function(err){
return err;
});
I am using Node.js and the request module to create a backend, and we've chosen Elasticsearch as our data storage. All fine so far, except it seems Node doesn't support request bodies on GET requests? This is necessary for Elasticsearch's _search API, which expects only GET requests as part of its semantic design. Is there a solution to force Node to send the request body even in the case of a GET request, or a means to use _search on Elasticsearch with another HTTP verb?
function elasticGet(url, data) {
data = data || {};
return Q.nfcall(request.get, {
uri: url,
body: JSON.stringify(data) //<-- noop
}).then(function(response) {
return JSON.parse(response[1]);
}, function(err) {
console.log(err);
});
}
The _search API also accepts the POST verb.
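If you want to keep your helper mostly as it is, the smallest change is to switch it to request.post and let request handle the JSON (untested sketch; the function name is a placeholder and the URL is whatever _search endpoint you already use):

var request = require('request');
var Q = require('q');

function elasticSearch(url, query) {
  // e.g. url = 'http://127.0.0.1:9200/my-index/_search'
  return Q.nfcall(request.post, {
    uri: url,
    json: true,        // serializes the body and parses the JSON response
    body: query || {}
  }).then(function(response) {
    return response[1]; // the body, already parsed because of json: true
  }, function(err) {
    console.log(err);
  });
}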
For simplicity, why not use their API rather than making requests manually?
A simple example:
var elasticsearch = require('elasticsearch'),
client = new elasticsearch.Client({
host: '127.0.0.1:9200',
log: 'trace'
});
client.search({
index: '[your index]',
q: 'simple query',
fields: ['field']
}, function (err, results) {
if (err) next(err);
var ids = []
if (results && results.hits && results.hits.hits) {
ids = results.hits.hits.map(function (h) {
return h._id;
})
}
searchHandler(ids, next)
})
You can combine it with FullScale Labs' elastic.js to build really complex queries, really fast.
https://github.com/fullscale/elastic.js
I had this issue a few days ago.
tl;dr: use POST.
According to https://www.elastic.co/guide/en/elasticsearch/guide/current/_empty_search.html#get_vs_post you can also use POST with Elasticsearch.
I tried sending the GET with a body using axios, but it returned all documents, as if no body had been sent.
So I used POST instead. It works for me, and I hope it will help someone else.
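For reference, a minimal sketch along the lines of what I ended up with (index name and query are just placeholders):

const axios = require('axios');

axios.post('http://127.0.0.1:9200/my-index/_search', {
  query: { match_all: {} }
}).then(function(res) {
  console.log(res.data.hits.hits);
}).catch(function(err) {
  console.error(err.message);
});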