While uploading a file to the Device Farm pre-signed S3 URL, I am getting the error code ECONNRESET.
This is my code:
var AWS = require('aws-sdk');
var fs = require('fs');
var req = require('request');

// Load credentials/region before constructing the Device Farm client
AWS.config.loadFromPath('C:/Users/Abin.mathew/AWSdata/config.json');
var devicefarm = new AWS.DeviceFarm();

var apkPath = "D:/DTS/APKs/TPLegacyPlugin-googleplaystore-debug-rc_16.2.15.apk";
var stats = fs.statSync(apkPath);
var url = "https://prod-us-west-2-uploads.s3-us-west-2.amazonaws.com/arn%3Aaws%3Adevicefarm%3Aus-west-2%3A594587224081%3Aproject%3Ade07f584-7c64-4748-aebd-ec965ab107cf/uploads/arn%3Aaws%3Adevicefarm%3Aus-west-2%3A594587224081%3Aupload%3Ade07f584-7c64-4748-aebd-ec965ab107cf/5dd627eb-4eb2-4f2d-a300-0fde0720bde4/MyAppiumPythonUpload?AWSAccessKeyId";

fs.createReadStream(apkPath).pipe(req({
    method: 'PUT',
    url: url,
    headers: {
        'Content-Length': stats['size']
    }
}, function (err, res, body) {
    console.log(body);
    console.log(res);
    console.log(err);
}));
Your URL is incorrect. It represents the Appium test package but you are trying to upload an APK. Are you reusing a URL from a previous operation? Pre-signed URLs also expire after a period of time so they should not be reused.
To make this work:
1). Call CreateUpload and get the pre-signed URL from the result.
2). PUT the correct file to that URL.
We have published a blog post that describes the procedure to follow. The code samples use the CLI, but translating them to Node.js should be trivial.
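For reference, here is a rough sketch of that flow in Node.js with the aws-sdk Device Farm client and the request module; the project ARN is a placeholder you would fill in, and the upload type must match the file being sent (ANDROID_APP for an APK, not APPIUM_PYTHON_TEST_PACKAGE):

var AWS = require('aws-sdk');
var fs = require('fs');
var request = require('request');

AWS.config.loadFromPath('C:/Users/Abin.mathew/AWSdata/config.json');
var devicefarm = new AWS.DeviceFarm({region: 'us-west-2'});

var apkPath = 'D:/DTS/APKs/TPLegacyPlugin-googleplaystore-debug-rc_16.2.15.apk';

// 1. Create the upload; the result contains a fresh pre-signed URL for this file.
devicefarm.createUpload({
    projectArn: '<your project ARN>', // placeholder
    name: 'TPLegacyPlugin-googleplaystore-debug-rc_16.2.15.apk',
    type: 'ANDROID_APP'
}, function (err, data) {
    if (err) return console.log(err);
    // 2. PUT the APK to the pre-signed URL before it expires.
    var stats = fs.statSync(apkPath);
    fs.createReadStream(apkPath).pipe(request({
        method: 'PUT',
        url: data.upload.url,
        headers: {'Content-Length': stats.size}
    }, function (err, res, body) {
        console.log(err || res.statusCode);
    }));
});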
I've been trying to asynchronously send a Blob image to a REST API using the request module and the Azure Storage module. I don't want to download the Blob to a local file and then create a readable stream from the local file, because that isn't performant. This is what I have attempted, but it throws the error "Unexpected end of MIME multipart stream. MIME multipart message is not complete." From the request docs, sending a file in the form data requires that you pass it a readable stream. It seems the readable stream from the Azure Storage client isn't compatible with the request module's format. Any ideas how to get this to work?
const request = require('request');
const storage = require('azure-storage');

const blobService = storage.createBlobService(process.env.AzureWebJobsStorage);

let stream = blobService.createReadStream(
    containerName,
    blobName,
    function(err, res) {
    });

let formData = {
    rootMessageId: messageId,
    file: stream
};

request.post({
    url: 'https://host-name/Api/comment',
    headers: {'Authorization': `Token ${authToken}`},
    formData: formData
}, (err, res, body) => {
    console.log(res);
});
I tried your code to upload an image blob to my own local URL http://localhost/upload, and found that some properties are missing from the file property of your formData.
Here is my code, which works:
const request = require('request');
const storage = require('azure-storage');

var accountName = '<your storage account name>';
var accountKey = '<your storage account key>';
var blobService = storage.createBlobService(accountName, accountKey);

let stream = blobService.createReadStream(containerName, blobName, function(err, res) {
    // fill in the missing file options from the blob's properties
    formdata.file.options.contentType = res.contentSettings.contentType;
    // knownLength (the blob's size in bytes) is required; the server errors if it is missing
    formdata.file.options.knownLength = res.contentLength;
    console.log(formdata);
});

var formdata = {
    rootMessageId: messageId,
    file: { // the properties that were missing
        value: stream,
        options: {
            filename: blobName.split('/').pop() // the last segment of the blob name
            // contentType and knownLength are set in the createReadStream callback above
        }
    }
};

request.post({
    url: 'https://host-name/Api/comment', // I used my URL http://localhost/upload here
    headers: {'Authorization': `Token ${authToken}`}, // I used an empty {} as the headers here
    formData: formdata
}, (err, res, body) => {
    console.log(res);
});
Thinking about the code above: it pipes a download stream into an upload stream, so all of the data also has to flow through your web app's machine. In my experience, if you can change the code of your REST application server, a better approach is to generate a SAS URL for the blob, post that URL to your REST API, and have the REST server download the blob itself.
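If that is an option, a rough sketch of the SAS approach with the azure-storage SDK might look like this; note that the fileUrl field in the POST body is a made-up name, and your REST API would have to accept a URL instead of the file itself:

const request = require('request');
const storage = require('azure-storage');

const blobService = storage.createBlobService(accountName, accountKey);

// Generate a read-only SAS token that expires in 30 minutes
const startDate = new Date();
const expiryDate = new Date(startDate);
expiryDate.setMinutes(startDate.getMinutes() + 30);

const sasToken = blobService.generateSharedAccessSignature(containerName, blobName, {
    AccessPolicy: {
        Permissions: storage.BlobUtilities.SharedAccessPermissions.READ,
        Start: startDate,
        Expiry: expiryDate
    }
});
const sasUrl = blobService.getUrl(containerName, blobName, sasToken);

// Send the URL instead of the file; the REST server downloads the blob itself
request.post({
    url: 'https://host-name/Api/comment',
    headers: {'Authorization': `Token ${authToken}`},
    json: {rootMessageId: messageId, fileUrl: sasUrl} // fileUrl is a hypothetical field
}, (err, res, body) => {
    console.log(err || body);
});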
I have a REST service made with Node.js and Express that returns a zip through the response using express-zip:
res.zip([{path:'file', name: 'file'}]);
When I call it with Node to test it, using the following code, it creates a valid zip file:
var fs = require('fs');
var request = require('request');

request({
    url: "http://deti-lei-2.ua.pt:3500/runProgram/posio/211/1",
    method: "GET",
    encoding: null, // get the body as raw bytes rather than a utf8 string
    headers: {
        Authorization: 'token'
    }
}).pipe(fs.createWriteStream('file.zip')).on('close', function () {
    console.log('File written!');
});
But I need to use the service from Angular 2. However, I have tried many ways of making the request and none worked; I always end up with a corrupt zip.
One of the ways I tried was the following:
downloadFile(program: string) {
    var bytes = [];
    for (var i = 0; i < program.length; i++) {
        bytes.push(program.charCodeAt(i));
    }
    var FileSaver = require('file-saver');
    var blob = new Blob([bytes], {type: 'application/zip'});
    FileSaver.saveAs(blob, 'Program');
}
I also tried creating a function in plain JS with the code above that was working, but I had no success because I couldn't use the fs and request modules there.
I'd appreciate it if someone can help.
Trying to request an image from Google CDN and upload it to S3.
Using the https://github.com/request/request library and Node / Express;
A little confused about how to handle the payload coming back from the Google CDN.
The image comes back encoded in the body field; I'm not sure how it is encoded.
Given a URL to a Google CDN:
const fileURL = 'https://lh4.googleusercontent.com/EWE1234rFL006WfQKuAVrsYMOiKnM6iztPtLgXM5U…3i26LoPHQwPTQME7ne3XoMriKVjUo3hrhwWw1211223';
request(fileURL, (err, res, body) => {
    // NOT sure how to handle the response here??
    // Trying base64
    fs.writeFileSync(`.tmp/file1.png`, body, {encoding: 'base64'});
    // Trying binary
    fs.writeFileSync(`.tmp/file.png`, body, {encoding: 'binary'});
});
body comes back as:
�PNG↵↵IHDRv&vл� IDATx�}���<z�f���];��o]��A�N�.po�/�/R���..............
1). Request an image from googleusercontent Google CDN (image was originally pasted in a Google Doc)
2). Create an image file and write to disk on the Server.
Neither of the fs.writeFileSync calls seems to produce a readable image file.
Any advice on handling this would be awesome.
Pass the response as the body to your S3 upload.
var request = require('request'),
    fs = require('fs'),
    aws = require('aws-sdk'),
    s3 = new aws.S3(),
    url = 'https://lh4.googleusercontent.com/-2XOcvsAH-kc/VHvmCm1aOoI/AAAAAAABtzg/SDdN1Vg5FFs/s346/14%2B-%2B1';

// Store in a file
request(url).pipe(fs.createWriteStream('file.gif'));

request(url, {encoding: 'binary'}, function(error, response, body) {
    // Another way to store in a file
    fs.writeFile('file.gif', body, 'binary', function(err) {});

    // Upload to S3
    s3.upload({
        Body: body,
        Bucket: 'BucketName',
        Key: 'file.gif',
    }, function(err, data) {});
});
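If you would rather work with a Buffer than a binary string, a couple of variants of the same idea (reusing the s3 and url variables from the snippet above) would be, roughly:

// Variant 1: encoding: null makes request hand you the body as a Buffer
request(url, {encoding: null}, function(error, response, body) {
    s3.upload({
        Body: body, // a Buffer here
        Bucket: 'BucketName',
        Key: 'file.gif',
    }, function(err, data) {});
});

// Variant 2: skip buffering and pass the download stream straight to s3.upload,
// which accepts a readable stream as Body
s3.upload({
    Body: request(url),
    Bucket: 'BucketName',
    Key: 'file.gif',
}, function(err, data) {});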
I work in a financial institution, and for security reasons my employer cannot give out the access key ID and secret access key for the AWS account. This means I can't use aws-sdk.
As a next option, would it be possible to upload files using HTTP PUT to a public S3 bucket, without the aws-sdk that requires the access key ID and secret access key?
I had a look at this answer: How to upload a file using a rest client for node
And was thinking of this approach:
var fs = require('fs');
var request = require('request');

var options = {
    method: 'PUT',
    preambleCRLF: true,
    postambleCRLF: true,
    uri: 'https://s3-ap-southeast-2.amazonaws.com/my-bucket/myFile.pdf',
    multipart: [
        {
            'content-type': 'application/pdf',
            body: fs.createReadStream('/uploads/uploaded-file.pdf')
        }
    ]
};

request(options, function(err, response, body) {
    if (err) {
        return console.log(err);
    }
    console.log('File uploaded to s3');
});
Could that work?
Your code above only works if you have custom storage (and even then it would have to be publicly writable), not for AWS storage.
For AWS storage, the access key ID and secret access key are mandatory; without them you cannot upload files to the bucket.
This is a bit old, but for anyone looking for the same thing: you can now use a pre-signed URL to achieve this. The way it works is that you create a pre-signed URL on your server (which does hold the credentials), share it with the client, and the client uses it to upload the file to S3.
On the server, generate the URL:
const AWS = require('aws-sdk')

// configure credentials before constructing the client
AWS.config.update({accessKeyId: 'access-key', secretAccessKey: 'access-pass'})

const s3 = new AWS.S3({
    region: 'us-east-1',
    signatureVersion: 'v4'
})

const myBucket = 'clearg-developers'
const myKey = 'directory/newFile.zip'
const signedUrlExpireSeconds = 60 * 5 // seconds until the url expires

const url = s3.getSignedUrl('putObject', {
    Bucket: myBucket,
    Key: myKey,
    Expires: signedUrlExpireSeconds
});

return url // hand this url to the client
And on the client, from Node, you can PUT the file to that URL; a successful upload returns an empty body:
var fs = require('fs');
var request = require('request');

var fileName = '/path/to/file.ext';
var stats = fs.statSync(fileName);

fs.createReadStream(fileName).pipe(request({
    method: 'PUT',
    url: url, // the pre-signed url generated on the server
    headers: {
        'Content-Length': stats['size']
    }
}, function (err, res, body) {
    console.log('success');
}));
I am trying to upload a file to Telegram's Bot API using the request module. However, I end up with a 502 Bad Gateway error. Here's my code:
var request = require("request");
var fs = require("fs");
fs.readFile("image.png",function(err,data){
var formdata = {};
formdata.chat_id = <chatid>;
formdata.photo = data;
if(err)
console.log(err);
request({
url : "https://api.telegram.org/bot<token>/sendPhoto",
method : "POST",
headers : {
"Content-Type" : "multipart/form-data"
},
formData : formdata
},function(err,res,body){
if(err)
console.log(err)
console.log(body);
})
});
Is this the proper way to upload a file or am I making a mistake somewhere?
I suggest it's better to use the form field of the request object, which lets you send the file using the createReadStream function of the fs module. For example:
var fs = require('fs');
var request = require('request');

var r = request.post({
    url: url
}, someHandler);

var form = r.form();
form.append('file', fs.createReadStream(filePath));
For proper use read:
https://nodejs.org/api/fs.html#fs_fs_createreadstream_path_options
https://github.com/request/request#forms
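For completeness, a minimal sketch of the same upload using the formData option directly (the bot token and chat id are placeholders, as in the question); passing a read stream lets request build the multipart body and boundary itself instead of a hand-written Content-Type header:

var fs = require('fs');
var request = require('request');

request.post({
    url: 'https://api.telegram.org/bot<token>/sendPhoto',
    formData: {
        chat_id: '<chatid>',
        photo: fs.createReadStream('image.png') // streamed as a multipart file part
    }
}, function(err, res, body) {
    if (err) return console.log(err);
    console.log(body);
});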