JSON object not being sent through PUT UnityWebRequest - node.js

So, I have this code in Unity:
var www = UnityWebRequest.Put(NetworkManager.Instance.data.ConnectionURL + "/upload-schedule", contents);
www.SetRequestHeader("Content-Type", "application/json");
yield return www.SendWebRequest();
if (www.result == UnityWebRequest.Result.ConnectionError)
{
    Debug.LogError("There was an error uploading the schedule");
}
else
{
    string downloadContents = www.downloadHandler.text;
    Debug.Log(downloadContents);
}
And then I have this node express application receiving this data:
app.put('/upload-schedule', (req, res) => {
    var filePath = "./data/schedule.json";
    var data = JSON.stringify(req.body);
    // Write file
    fs.writeFile(filePath, data, (err) => {
        // Bail out if an error occurs
        if (err) {
            console.error(err);
            return;
        }
        // Report success
        res.send("Successfully uploaded file!");
        console.log("Successfully uploaded file!");
    });
});
For some reason this works fine on localhost, but as soon as I use my web API, req.body always comes out empty. Does anyone know the reason for this and how I can fix it? Thank you.
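For req.body to be populated at all, the Express app has to register JSON body-parsing middleware before the route. A minimal sketch of that setup (assuming Express 4.16+, where express.json() is built in; this is not code from the question):

var express = require('express');
var fs = require('fs');
var app = express();

// Without a JSON body parser, req.body stays undefined/empty.
app.use(express.json());

app.put('/upload-schedule', (req, res) => { /* handler as above */ });

app.listen(3000);

Note that express.json() only parses requests whose Content-Type is application/json, so a reverse proxy in front of the remote API that strips or rewrites that header is a common way to end up with an empty req.body even though localhost works.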

Related

How to return database data from NodeJS Express routes?

Working with NodeJS for the first time, trying to build a public endpoint that can accept an XML file, convert it to JSON, save it to MongoDB, then send a 200 HTTP status code if everything went well. But the Express route completes and sends a response long before the code writing to the database completes.
A slightly simplified version:
app.post('/ams', function (req, res) {
    try {
        if (Object.keys(req.body).length === 0) {
            console.log("request body was empty");
            // throw new EmptyBodyException(req.body);
            message = message + "Request body was empty \n";
        }
        let body_keys = Object.keys(req.body);
        let body_values = Object.values(req.body);
        let xml_string = body_keys[0] + ":" + body_values[0];
        let xml = xml_string.trim();
        console.log("The trimmed XML:");
        console.log(xml);
        // convert XML to JSON
        xml2js.parseString(xml, (err, json) => {
            if (err) {
                message = "xml2js.parseString failed, xml was: " + xml + "\n\n" + err;
                console.log(message);
                res.status(500);
                res.send(message);
                return; // without this, the code below still runs after a failed parse
            }
            const documentType = json.Document.DocumentType;
            if (documentType == 945) {
                const shipment = json.Document.Shipment[0];
                const shipment_header = shipment.ShipmentHeader[0];
                const addresses = shipment.Addresses[0].Address;
                const order_header = shipment.Orders[0].Order[0].OrderHeader[0];
                const cartons = shipment.Orders[0].Order[0].Cartons[0].Carton;
                const unique_id = shipment_header.ShipmentID[0];
                found_document_promise = document_is_redundant(AMS_945, unique_id);
                found_document_promise.then(function (found_document) {
                    if (found_document != null) {
                        console.log("Redundant document. Perhaps a Pair Eyewear engineer was running a test?");
                        res.status(200);
                        message = "Redundant document. Perhaps a Pair Eyewear engineer was running a test? documentType: " + documentType;
                        res.send(message);
                    } else {
                        new AMS_945({
                            shipment_header: shipment_header,
                            addresses: addresses,
                            order_header: order_header,
                            cartons: cartons,
                            unique_id: unique_id
                        })
                        .save()
                        .then(function () {
                            // console.log("saved AMS_945");
                            message = "saved AMS_945";
                            res.status(200);
                            res.send(message);
                        })
                        .catch(function (err) {
                            message = "error when saving AMS_945 to database: " + "\n\n" + err;
                            console.log(message);
                            res.status(500);
                            res.send(message);
                        });
                    }
                })
                .catch(function (err) {
                    message = "error when checking for a redundant AMS_945 document: " + "\n\n" + err;
                    console.log(message);
                    res.status(500);
                    res.send(message);
                });
            }

    // down at the bottom I have some generic catch-all:
    res.status(200);
    res.send("Catch all response.");
If I don't have the catch-all response at the end then the connection simply hangs until the 30-second timeout is hit, and then I get a 504 Gateway Timeout.
With the catch-all response at the bottom, thankfully I don't get the timeout error, but I do get an error "Sending headers after response was sent" because at some point the database code returns and tries to send its response, but this is long after the Express route function has completed and sent that generic catch-all code that I have at the bottom.
I'd be happy to get rid of the catch-all res.send() and just have the res.send() inside the database code, but that never seems to return.
So how do I get the Express route function to wait till the database code has returned?
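Not from the thread, but a sketch of the usual approach: promisify the XML parse, then await each step so the route sends exactly one response and never falls through to a catch-all. Names like document_is_redundant and AMS_945 are taken from the question; the rest is illustrative.

const util = require('util');
const xml2js = require('xml2js');
const parseXml = util.promisify(xml2js.parseString);

app.post('/ams', async function (req, res) {
    try {
        // same XML extraction as in the question
        const keys = Object.keys(req.body);
        const values = Object.values(req.body);
        const xml = (keys[0] + ":" + values[0]).trim();

        const json = await parseXml(xml);
        if (json.Document.DocumentType != 945) {
            return res.status(200).send("Ignored documentType");
        }

        const unique_id = json.Document.Shipment[0].ShipmentHeader[0].ShipmentID[0];
        const found = await document_is_redundant(AMS_945, unique_id);
        if (found != null) {
            return res.status(200).send("Redundant document.");
        }

        await new AMS_945({ unique_id: unique_id /* ...other fields as in the question... */ }).save();
        return res.status(200).send("saved AMS_945");
    } catch (err) {
        return res.status(500).send("error: " + err);
    }
});

Because every branch returns after sending, there is no need for a catch-all response and no "headers after response was sent" error.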

Postman always identifies 200 on Node.js + Express, even when I send something else

I'm using an API coded in NodeJS, but Postman always identifies HTTP status 200, even when I send a 4XX or another status.
I'm using express and body-parser
As you can see below:
The code is:
router.get('/', async (req, res) => {
    try {
        let dados = await busca(0);
        let pasta = '/upload/';
        let nomearquivo = '';
        var contents = '';
        if (dados) {
            for (const [idx, row] of dados.entries()) {
                nomearquivo = pasta + row.arquivotitulo;
                if (fs.existsSync(nomearquivo)) {
                    contents = fs.readFileSync(nomearquivo, { encoding: 'base64' });
                    dados[idx]['arquivotitulo64'] = contents;
                }
            }
            return res.status(200).send(dados);
        } else {
            return res.sendStatus(404);
        }
    } catch (err) {
        console.log(err);
        return res.sendStatus(500);
    }
});
Can anyone help me send responses correctly, so that Postman can identify the correct status code?
Try it like this: res.status(404).send('Not Found'); It should work and is straightforward.
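If it helps to sanity-check, here is a minimal, self-contained route set (hypothetical paths, not from the question) where Postman should report 200, 404, and 500 respectively:

const express = require('express');
const app = express();

app.get('/found', (req, res) => res.status(200).send('OK'));
app.get('/missing', (req, res) => res.status(404).send('Not Found'));
app.get('/broken', (req, res) => res.sendStatus(500));

app.listen(3000);

If these also show 200 in Postman, the problem is in front of the app (a proxy rewriting responses) rather than in the Express code.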

JSForce: "File is not supported" message shown while downloading a file from the SFDC REST API and writing it to local disk

I have been trying to download an image file using JSForce for Node.js, and I am able to create a file locally after retrieving the data and converting it to base64 format, but the image shows a "file not supported" message, whereas I am able to download a JavaScript-type file with correct data.
I am querying the attachment field of a knowledge article in Salesforce.
Following is my query :
SELECT Body__c, Attachment__Name__s, Attachment__ContentType__s, Attachment__Length__s, Attachment__Body__s, Id, KnowledgeArticleId, Title, UrlName FROM Knowledge__kav
I am sending GET request to Attachment__Body__s field of article.
Following is my node js code:
function createFile(attachmentBody, attachmntContentType, attachmntName) {
    var req = {
        url: attachmentBody,
        method: 'GET',
        headers: {
            "Content-Type": attachmntContentType
        }
    };
    var test = conn.request(req, function (err, resp) {
        if (err) {
            console.log(err);
        } else {
            var fileBuffer = Buffer.from(resp, 'binary').toString('base64');
            console.log('fileBuffer--- ' + fileBuffer);
            fs.writeFile('./downloadedAttachments/' + attachmntName, fileBuffer, 'base64', function (err) {
                if (err) throw err;
                console.log('File saved.');
            });
        }
    });
}
Please help me with this.
I am successfully able to download the file in the correct format. Following is my updated code:
function createFile(knbid, attachmntName, callback) {
    var file_here = conn.sobject('Knowledge__kav').record(knbid);
    file_here.retrieve(function (err, response) {
        if (err) {
            console.error(err);
            return callback(0);
        } else {
            var obj = fs.createWriteStream('./downloadedAttachments/' + attachmntName, { defaultEncoding: 'binary' });
            //console.log('blob--'+JSON.stringify(file_here.blob('Attachment__Body__s')));
            var stream = file_here.blob('Attachment__Body__s').pipe(obj);
            stream.on('finish', function (err, result) {
                if (err)
                    console.log('not downloaded' + knbid);
                else
                    console.log('downloaded-' + knbid);
            });
        }
    });
}
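For completeness, a sketch of the jsforce connection the snippet above assumes (the login URL, credentials, and record id are placeholders, not values from the question):

const jsforce = require('jsforce');
const conn = new jsforce.Connection({ loginUrl: 'https://login.salesforce.com' });

conn.login('user@example.com', 'passwordPlusSecurityToken', function (err) {
    if (err) return console.error(err);
    // knbid and file name are hypothetical example values
    createFile('kA0xx0000000001', 'logo.png', function () {});
});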

Uploading a file in Azure File Storage using node.js

We are trying to create a web service to upload files to Azure File Storage using a node.js service.
Below is the node.js server code.
exports.post = function (request, response) {
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body;
    var length;
    request.on("data", function (chunk) {
        body += chunk;
        console.log("Get data");
    });
    request.on("end", function () {
        try {
            console.log("end");
            var data = body;
            length = data.length;
            console.log(body); // This giving the result as undefined
            console.log(length);
            fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function (error, result, resp) {
                if (!error) {
                    // file uploaded
                    response.send(statusCodes.OK, "File Uploaded");
                } else {
                    response.send(statusCodes.OK, "Error!");
                }
            });
        } catch (er) {
            response.statusCode = 400;
            return response.end('error: ' + er.message);
        }
    });
};
Below is our client to upload a file.
private static void sendPOST() throws IOException {
    URL obj = new URL("https://crowdtest-fileservice.azure-mobile.net/api/files_stage/");
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("sharename", "newamactashare");
    con.setRequestProperty("directorypath", "MaheshApp/TestLibrary/");
    con.setRequestProperty("filename", "temp.txt");
    Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
    byte[] data = Files.readAllBytes(path);
    // For POST only - START
    con.setDoOutput(true);
    OutputStream os = con.getOutputStream();
    os.write(data);
    os.flush();
    os.close();
    // For POST only - END
    int responseCode = con.getResponseCode();
    System.out.println("POST Response Code :: " + responseCode);
    if (responseCode == HttpURLConnection.HTTP_OK) { // success
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        String inputLine;
        StringBuffer response = new StringBuffer();
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
            System.out.println(inputLine);
        }
        in.close();
        // print result
        System.out.println(response.toString());
    } else {
        BufferedReader br = new BufferedReader(new InputStreamReader(con.getErrorStream()));
        String line = "";
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        System.out.println("POST request not worked");
    }
}
It is showing the error
The request 'POST /api/files_stage/' has timed out. This could be
caused by a script that fails to write to the response, or otherwise
fails to return from an asynchronous call in a timely manner.
Updated:
I have also tried the code below.
var body = new Object();
body = request.body;
var length = body.length;
console.log(request.body);
console.log(body);
console.log(length);
try {
    fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function (error, result, resp) {
        if (!error) {
            // file uploaded
            response.send(statusCodes.OK, "File Uploaded");
        } else {
            response.send(statusCodes.OK, "Error!");
        }
    });
} catch (ex) {
    response.send(500, { error: ex.message });
}
But I am facing this issue:
{"error":"Parameter stream for function createFileFromStream should be
an object"}
I am new to node.js. Please help me to fix this.
There are several issues here. Let us go over them one by one.
1. In your Java client you cannot just dump the binary data into an Azure mobile service connection.
The reason for this is that an Azure mobile service has two body parsers that ensure that no matter what, the request body is parsed for you.
So, while you can work around the Express body parser by specifying an uncommon content type, you will still hit the Azure body parser, which will mess up your data stream by naively assuming that it is a UTF-8 string.
The only option therefore is to skip the Express parser by specifying a content type it cannot handle and then play along with the Azure parser by encoding your binary data with Base64 encoding.
So, in the Java client replace
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
with
con.setRequestProperty("content-type", "binary");
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
data = Base64.getEncoder().encode(data);
If you are not on Java 8, replace the java.util.Base64 encoder with any other Base64 encoder you have access to.
2. The createFileFromStream Azure storage api function you are trying to use expects a stream.
At the same time, the best you can get when parsing a request body manually is a byte array. Unfortunately, Azure mobile services use NodeJS version 0.8, which means there is no easy way to construct a readable stream from a byte array, and you will have to assemble your own stream suitable for the Azure storage API. Some duct tape and stream#0.0.1 should do just fine.
var base64 = require('base64-js'),
    Stream = require('stream'),
    fileService = require('azure-storage')
        .createFileService('yourStorageAccount', 'yourStoragePassword');

exports.post = function (req, res) {
    var data = base64.toByteArray(req.body),
        buffer = new Buffer(data),
        stream = new Stream();

    stream['_ended'] = false;
    stream['pause'] = function () {
        stream['_paused'] = true;
    };
    stream['resume'] = function () {
        if (stream['_paused'] && !stream['_ended']) {
            stream.emit('data', buffer);
            stream['_ended'] = true;
            stream.emit('end');
        }
    };

    try {
        fileService.createFileFromStream(req.headers.sharename, req.headers.directorypath,
            req.headers.filename, stream, data.length, function (error, result, resp) {
                res.statusCode = error ? 500 : 200;
                res.end();
            }
        );
    } catch (e) {
        res.statusCode = 500;
        res.end();
    }
};
These are the dependencies you need for this sample.
"dependencies": {
"azure-storage": "^0.7.0",
"base64-js": "^0.0.8",
"stream": "0.0.1"
}
If specifying them in your service's package.json does not work you can always go to this link and install them manually via the console.
cd site\wwwroot
npm install azure-storage
npm install base64-js
npm install stream#0.0.1
3. To increase the default upload limit of 1Mb, specify MS_MaxRequestBodySizeKB for your service.
Do keep in mind though that since you are transferring you data as Base64-encoded you have to account for this overhead. So, to support uploading files up to 20Mb in size, you have to set MS_MaxRequestBodySizeKB to roughly 20 * 1024 * 4 / 3 = 27307.
I find the easiest way is to use pkgcloud which abstracts the differences between cloud providers and also provides a clean interface for uploading and downloading files. It uses streams so the implementation is memory efficient as well.
var pkgcloud = require('pkgcloud');
var fs = require('fs');

var client = pkgcloud.storage.createClient({
    provider: 'azure',
    storageAccount: 'your-storage-account',
    storageAccessKey: 'your-access-key'
});

var readStream = fs.createReadStream('a-file.txt');
var writeStream = client.upload({
    container: 'your-storage-container',
    remote: 'remote-file-name.txt'
});

writeStream.on('error', function (err) {
    // handle your error case
});

writeStream.on('success', function (file) {
    // success, file will be a File model
});

readStream.pipe(writeStream);
We can leverage this answer from the SO thread How to send an image from Android client to Node.js server via HttpUrlConnection?, which creates a custom middleware to read the uploaded file content into a buffer; then we can use createFileFromText() to store the file in Azure Storage.
Here is the code snippet:
function rawBody(req, res, next) {
    var chunks = [];
    req.on('data', function (chunk) {
        chunks.push(chunk);
    });
    req.on('end', function () {
        var buffer = Buffer.concat(chunks);
        req.bodyLength = buffer.length;
        req.rawBody = buffer;
        next();
    });
    req.on('error', function (err) {
        console.log(err);
        res.status(500);
    });
}

router.post('/upload', rawBody, function (req, res) {
    fileService.createShareIfNotExists('taskshare', function (error, result, response) {
        if (!error) {
            // if result = true, share was created.
            // if result = false, share already existed.
            fileService.createDirectoryIfNotExists('taskshare', 'taskdirectory', function (error, result, response) {
                if (!error) {
                    // if result = true, directory was created.
                    // if result = false, directory already existed.
                    try {
                        fileService.createFileFromText('taskshare', 'taskdirectory', 'test.txt', req.rawBody, function (error, result, resp) {
                            if (!error) {
                                // file uploaded
                                res.send(200, "File Uploaded");
                            } else {
                                res.send(200, "Error!");
                            }
                        });
                    } catch (ex) {
                        res.send(500, { error: ex.message });
                    }
                }
            });
        }
    });
});

router.get('/getfile', function (req, res) {
    fileService.createReadStream('taskshare', 'taskdirectory', 'test.txt').pipe(res);
});
When the request arrives at the function defined in exports.post, the whole request is already there, so you don't need to buffer it. You can simplify it by writing something along the lines of the code below.
exports.post = function (request, response) {
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body = request.body;
    var length = body.length;
    console.log(length);
    try {
        fileService.createFileFromText(shareName, dirPath, fileName, body, function (error, result, resp) {
            if (!error) {
                // file uploaded
                response.send(statusCodes.OK, "File Uploaded");
            } else {
                response.send(statusCodes.OK, "Error!");
            }
        });
    } catch (ex) {
        response.send(500, { error: ex.message });
    }
};
There are several things:
1. createFileFromText can work with plain text, but it will fail for binary content, as it uses UTF-8 encoding.
You might want to refer to the similar issue for blobs at: Saving blob (might be data!) returned by AJAX call to Azure Blob Storage creates corrupt image
2. The createFileFromStream, createWriteStreamToExistingFile, or createWriteStreamToNewFile Azure Storage APIs may be the functions that can help.
Please note that these APIs target streams, so you need to convert the buffer/string in the request body to a stream. You can refer to How to wrap a buffer as a stream2 Readable stream?
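For illustration, one way to build the requestStream used below: a sketch that wraps an already-collected body Buffer in a Readable (bufferToStream is a name made up here, and req.rawBody refers to the middleware shown earlier in this thread):

var Readable = require('stream').Readable;

function bufferToStream(buffer) {
    var readable = new Readable({
        read: function () {} // no-op; all data is pushed up front
    });
    readable.push(buffer); // queue the whole buffer as one chunk
    readable.push(null);   // signal end-of-stream
    return readable;
}

var requestStream = bufferToStream(req.rawBody);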
For createFileFromStream:
fileService.createFileFromStream(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    requestStream,
    data.length,
    function (error, result, resp) {
        res.statusCode = error ? 500 : 200;
        res.end();
    }
);
For createWriteStreamToNewFile:
var writeStream = fileService.createWriteStreamToNewFile(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    data.length);
requestStream.pipe(writeStream);
3. There are several issues in your code
console.log(body); // This giving the result as undefined
The reason is that you declare var body without initializing it, so it starts out undefined; body += chunk then coerces it to the string "undefined" followed by the stringified chunks, corrupting the data. Initialize it (var body = '';) or, better, collect the chunks as Buffers.
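A sketch of collecting the body without that string coercion (standard Node, not specific to Azure):

var chunks = [];
request.on("data", function (chunk) {
    chunks.push(chunk);
});
request.on("end", function () {
    var body = Buffer.concat(chunks); // a real Buffer, not "undefined..."
    console.log(body.length);
});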
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function (error, result, resp) {
    if (!error) {
        // file uploaded
        response.send(statusCodes.OK, "File Uploaded");
    } else {
        response.send(statusCodes.OK, "Error!");
    }
});
When an error happens in createFileFromStream, it could also be an error in the network transfer, so you might want to return an error code instead of statusCodes.OK.
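For instance (a sketch, using a numeric status the way other snippets in this thread do):

fileService.createFileFromStream(shareName, dirPath, fileName, stream, length, function (error, result, resp) {
    if (!error) {
        response.send(statusCodes.OK, "File Uploaded");
    } else {
        // surface the failure instead of replying OK
        response.send(500, { error: error.message });
    }
});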

Accessing the raw file stream from a node-formidable file upload

I am creating an application that takes some file uploads and sends them straight up to S3. I would prefer not to even have the tmp file on my server, so I am using the Knox module and would like to take the raw stream from Formidable and send it over Knox to S3. I have done something similar using Knox to download a file with this code:
knox.downloads.get(widget.download).on('response', function (sres) {
    res.writeHead(200, {
        'Content-Type': 'application/zip',
        'Content-Length': sres.headers['content-length'],
        'Content-Disposition': 'attachment; filename=' + widget.download
    });
    util.pump(sres, res);
}).end();
Now I would like to do something similar in the opposite direction (file upload from the browser to S3).
So far I have written an event handler to capture each piece of data from the file as it's being uploaded:
var form = new formidable.IncomingForm();
form.onPart = function (part) {
    if (!part.filename) {
        form.handlePart(part);
    } else {
        if (part.name == 'download') {
            // Upload to download bucket
            controller.putDownload(part);
        } else {
            // Upload to the image bucket
            controller.putImage(part);
        }
        //res.send(sys.inspect(part));
    }
};
form.parse(req, function (err, fields, files) {
    if (err) {
        res.json(err);
    } else {
        res.send(sys.inspect({ fields: fields, files: files }), { 'content-type': 'text/plain' });
        //controller.createWidget(res,fields,files);
    }
});
controller.putDownload = function (part) {
    part.addListener('data', function (buffer) {
        knox.download.putStream(data, part.filename, function (err, s3res) {
            if (err) throwError(err);
            else {
                console.log(s3res);
            }
        });
    });
    knox.downloads.putStream(part, part.filename, function (err, s3res) {
        if (err) throwError(err);
        else {
            console.log(s3res);
        }
    });
};
But the data event only gives me the buffer. So is it possible to capture the stream itself and push it to S3?
What you want to do is override the Form.onPart method:
IncomingForm.prototype.onPart = function (part) {
    // this method can be overwritten by the user
    this.handlePart(part);
};
Formidable's default behavior is to write the part to a file. You don't want that. You want to handle the 'part' events to write to the knox download. Start with this:
form.onPart = function (part) {
    if (!part.filename) {
        // let formidable handle all non-file parts
        form.handlePart(part);
        return;
    }
Then open the knox request and handle the raw part events yourself:
part.on('data', function (data) {
    req.write(data);
});
part.on('end', function () {
    req.end();
});
part.on('error', function (err) {
    // handle this too
});
As a bonus, if req.write(data) returns false, that means the send buffer is full. You should pause the Formidable parser, and when you get a drain event from the Knox stream, resume it, as sketched below.
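A sketch of that flow control (form and req as in the snippets above; formidable's IncomingForm exposes pause() and resume()):

part.on('data', function (data) {
    if (!req.write(data)) {
        form.pause();  // knox send buffer is full; stop emitting parts
    }
});
req.on('drain', function () {
    form.resume();     // buffer flushed; keep parsing
});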
Use multiparty instead. It supports this kind of streaming like you want. It even has an example of streaming directly to s3: https://github.com/superjoe30/node-multiparty/blob/master/examples/s3.js
In an Express middleware, I use formidable together with PassThrough to stream-upload a file to S3 (in my case, to Minio, which is S3-compatible, through the Minio SDK; I believe it works for AWS S3 too with the same SDK).
Here is the sample code.
const formidable = require('formidable')
const { PassThrough } = require('stream')

const form = new formidable.IncomingForm()
const pass = new PassThrough()

const fileMeta = {}
form.onPart = part => {
  if (!part.filename) {
    form.handlePart(part)
    return
  }
  fileMeta.name = part.filename
  fileMeta.type = part.mime
  part.on('data', function (buffer) {
    pass.write(buffer)
  })
  part.on('end', function () {
    pass.end()
  })
}
form.parse(req, err => {
  if (err) {
    req.minio = { error: err }
    next()
  } else {
    handlePostStream(req, next, fileMeta, pass)
  }
})
And handlePostStream looks like below, for your reference:
const uuidv1 = require('uuid/v1')

const handlePostStream = async (req, next, fileMeta, fileStream) => {
  let filename = uuidv1()
  try {
    const metaData = {
      'content-type': fileMeta.type,
      'file-name': Buffer.from(fileMeta.name).toString('base64')
    }
    const minioClient = /* Get Minio Client */
    await minioClient.putObject(MINIO_BUCKET, filename, fileStream, metaData)
    req.minio = { post: { filename: `${filename}` } }
  } catch (error) {
    req.minio = { error }
  }
  next()
}
You can find the source code on GitHub, and its unit tests too.
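Wiring it into a route might look like this (uploadToMinio is a hypothetical name for the middleware built around form.parse above):

app.post('/upload', uploadToMinio, (req, res) => {
  if (req.minio.error) {
    return res.status(500).json({ error: String(req.minio.error) })
  }
  res.status(200).json(req.minio.post)
})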
There is no way for you to capture the stream, because the data has to be translated by Formidable. The buffer you're given is the file contents in chunks of buffer.length. This might be a problem because, looking at Formidable's docs, it appears that it can't reliably report the file size until the file is completely uploaded, and Knox's put method might need that.
Never used Knox this way before, but you might have some luck with something like this:
controller.putDownload = function (part) {
    var req = knox.download.put(part.filename, {
        'Content-Type': 'text/plain'
    });
    part.addListener('data', function (buffer) {
        req.write(buffer);
    });
    req.on('response', function (res) {
        // error checking
    });
    req.end();
};
A little unsure about the response-checking bits, but see if you can whip that into shape; a guess at them follows below. Also, Streaming an octet stream from request to S3 with knox on node.js has a writeup that may be useful to you.
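A sketch of those response-checking bits (knox hands you the raw S3 HTTP response, so a non-200 status means the upload failed):

req.on('response', function (s3res) {
    if (s3res.statusCode === 200) {
        console.log('Uploaded to S3');
    } else {
        console.error('S3 upload failed with status ' + s3res.statusCode);
    }
});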
