Updating a txt file in AWS S3 - Converting Uint8Array to String

I am trying to create a feedback button for my app. It basically just writes the feedback to a text file, but right now it replaces the existing text file with the new feedback string. So basically I need to get the current text file and concatenate its contents with the new feedback so nothing gets overwritten.
Problem is, whenever I get this text file it gives me what appears to be a Uint8Array object:
The last line of these logs is the string to be added to the text file. Right above that is the object returned from the call to AWS. As you can see, it is some object and not a string, so my .txt file ends up containing [object Object]. The second log leads me to believe that the object is a Uint8Array because of the Body field.
My code looks something like this:
function uploadLog(message, key) {
    var moment = {key: key};
    var deferred = $q.defer();
    awsServices.getObject(key).then(function(data) {
        console.log("NEW MESSAGE");
        console.log(data);
        data = new TextDecoder("utf-8").decode(data.Body);
        // data = Utf8ArrayToStr(data.Body);
        console.log(data);
        console.log(message);
        newMessage = message.toString() + '\r\n\r\n' + data;
        var blob = new Blob([newMessage.toString()], {type: "text"});
        var file = new File([blob], key);
        awsServices.upload(file, moment.key, moment).then(function() {
            deferred.resolve();
        }, function(error) {
            console.log("UPLOAD LOG REJECT");
            deferred.reject();
        });
    }, function(error) {
        console.log("ERROR");
        console.log(error);
        deferred.reject();
    });
    return deferred.promise;
}
I am basically just getting the text file from AWS, reading it, concatenating the new string onto the file, and then re-uploading it.
Now I have tried a variety of functions to convert a Uint8Array to a String, such as this one:
https://ourcodeworld.com/articles/read/164/how-to-convert-an-uint8array-to-string-in-javascript
and this one:
Conversion between UTF-8 ArrayBuffer and String
Neither seems to work (it still prints as [object Object]), and I have also tried TextDecoder.
Has anyone ever updated a text or log file in AWS S3? How did you do it? I have a slight suspicion that this file is not actually a Uint8Array, otherwise these conversion methods would have worked.
EDIT:
awsServices.getObject(key).then(function(data) {
    console.log("UPLOAD LOG");
    console.log(typeof(data)); // object
    console.log(data); // *Screenshot above
    console.log(data instanceof Blob); // false
    console.log(data instanceof ReadableStream); // false
    console.log(data instanceof Object); // true
    data = uintToString(data.Body);
    console.log(typeof(data)); // string
    console.log(data); // [object Object]
    newMessage = message.toString() + '\r\n\r\n' + data;
    var blob = new Blob([newMessage.toString()], {type: "text"});
    var file = new File([blob], key);
    awsServices.upload(file, moment.key, moment).then(function() {
        deferred.resolve();
    }, function(error) {
        console.log("UPLOAD LOG REJECT");
        deferred.reject();
    });
}, function(error) {

Ensure that the S3 object metadata has the correct content type: set Content-Type = text/plain.
Then use data.Body.toString('ascii'), or some other encoding such as utf-8.
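For reference, here is a minimal sketch of the whole read-append-reupload cycle, assuming the AWS SDK for JavaScript v2 running in Node (where data.Body is a Buffer); the bucket, key, and function names are placeholders:

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

function appendToLog(bucket, key, message) {
    // Fetch the current object, prepend the new message, and upload the result.
    return s3.getObject({ Bucket: bucket, Key: key }).promise()
        .then(function (data) {
            var existing = data.Body.toString('utf-8'); // Buffer -> string in Node
            var updated = message + '\r\n\r\n' + existing;
            return s3.putObject({
                Bucket: bucket,
                Key: key,
                Body: updated,
                ContentType: 'text/plain'
            }).promise();
        });
}

In the browser SDK, data.Body is typically a typed array instead, in which case new TextDecoder('utf-8').decode(data.Body) is the equivalent conversion.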

Related

Node.js: Getting [object Object] when using fs.writeFileSync

I am reading a CSV file, then writing another in the format I need. console.log shows the data I need, but the file I create shows [object Object] for each line.
I am not sure why console.log shows the correct data but the file does not. I've read over the Node documentation but I cannot figure this out. Any information you can provide is appreciated.
var fs = require("fs");
var parse = require('csv-parse');

//read file
var inputFile = 'EXAMPLE_UPSdailyShipment.csv';
fs.createReadStream('EXAMPLE_UPSdailyShipment.csv', "utf8", function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
});

var arr = [];

//data for new file
var parser = parse({
    delimiter: ","
}, function(err, data) {
    data.forEach(function(column) {
        // create line object, data from parsed fields, PackageReference1 is order#
        var line = {
            "BuyerCompanyName": " ",
            "OrderNumber": column[8],
            "ShipperName": "UPS",
            "TrackingNumber": column[9],
            "DateShipped": column[0],
            "ShipmentCost": column[2],
        };
        arr.push(line);
    });
    console.log(arr);
    fs.writeFileSync("newtemp3.csv", arr, 'utf8');
    console.log("file read and new file created");
});

fs.createReadStream(inputFile).pipe(parser);
I think you just need to stringify the data first:
fs.writeFileSync("newtemp3.csv", JSON.stringify(arr), 'utf8');
Hopefully this solves your problem.
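Since the output file is a .csv, a hedged alternative (a sketch reusing the arr built above, and assuming a Node version with Object.values) is to join the fields into CSV rows yourself instead of writing JSON:

var header = Object.keys(arr[0]).join(',');
var rows = arr.map(function (line) {
    // naive join; fields containing commas would need quoting
    return Object.values(line).join(',');
});
fs.writeFileSync('newtemp3.csv', [header].concat(rows).join('\n'), 'utf8');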

update a json in node

{
aps: []
}
I read it like this:
let apartments = require("path to json file");
apartments.aps.push(apa); // apa is a valid object
fs.writeFile("path", JSON.stringify(apartments));
aps will contain objects like this:
{ "id":0, "address": "something"}
When I push into my JSON file I see
[object Object]
Because apa is a string. JSON is a text format for representing JavaScript objects (hence the name); you'll need to parse it (using a suitable library) before you can use it as an object.
Here's a simple working example:
const fs = require('fs');
const data = require('./message.json');
// add new value
data.new = 'new value';
fs.writeFile('message.json', JSON.stringify(data), (err) => {
    if (err) throw err;
    console.log('The file has been saved!');
});
Original content:
{"a":1,"b":2}
Modified content:
{"a":1,"b":2,"new":"new value"}

Why is the file created from S3 stream empty?

I am trying to access a file in a private S3 bucket from a lambda function identified by Cognito.
Reading the stream works outside a lambda but not inside a lambda
Creating a pre-signed url works inside a lambda
Waiting for the content to be ready as a string works inside a lambda
I've managed to get a pre-signed url to download the file. Using the same parameters, I've tried to write the read stream to a local file. A file gets created but it's empty. I couldn't catch any error in the process.
const s3 = new AWS.S3({ apiVersion: 'latest' });
const file = 's3Filename.csv';
const userId = event.requestContext.identity.cognitoIdentityId;
const s3Params = {
    Bucket: 'MY_BUCKET',
    Key: `private/${userId}/${file}`,
};

var fileStream = require('fs').createWriteStream('/path/to/my/file.csv');
var s3Stream = s3.getObject(s3Params).createReadStream();

// Try to print s3 stream errors
s3Stream
    .on('error', function (err) {
        console.error(err); // prints nothing
    });

// Try to print fs errors
s3Stream
    .pipe(fileStream)
    .on('error', function (err) {
        console.error('File Stream:', err); // prints nothing
    })
    .on('data', function (chunk) {
        console.log(chunk); // prints nothing
    })
    .on('end', function () {
        console.log('All the data in the file has been read'); // prints nothing
    })
    .on('close', function (err) {
        console.log('Stream has been Closed'); // prints nothing
    });
I am quite confident that my parameters are correct because I can get a pre-signed url that allows me to download the file.
console.log(s3.getSignedUrl('getObject', s3Params));
I can also read the file content using getObject().promise(). This could work but I'm parsing a CSV file and I'd rather go easy on the memory and parse the stream.
try
{
    const s3Response = await s3.getObject(s3Params).promise();
    let objectData = s3Response.Body.toString('utf-8');
    console.log(objectData);
}
catch (ex)
{
    console.error(ex);
}
Why is the file created from S3 stream empty? And why is there nothing that prints?
Could it be an access policy issue? If that's the case, why didn't I get any error when executing?
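For context, the streaming parse the asker is aiming for (instead of buffering the whole body with .promise()) would look roughly like this sketch, assuming the csv-parse package and that the underlying stream issue is resolved:

const parse = require('csv-parse');

s3.getObject(s3Params)
    .createReadStream()
    .pipe(parse({ delimiter: ',' }))
    .on('data', function (record) {
        // record is one parsed row (an array of field values)
        console.log(record);
    })
    .on('error', function (err) {
        console.error(err);
    })
    .on('end', function () {
        console.log('CSV fully parsed');
    });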

Uploading a file in Azure File Storage using node.js

We are trying to create a web service to upload files to Azure File Storage using a Node.js service.
Below is the node.js server code.
exports.post = function(request, response) {
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body;
    var length;

    request.on("data", function(chunk) {
        body += chunk;
        console.log("Get data");
    });

    request.on("end", function() {
        try {
            console.log("end");
            var data = body;
            length = data.length;
            console.log(body); // This giving the result as undefined
            console.log(length);
            fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
                if (!error) {
                    // file uploaded
                    response.send(statusCodes.OK, "File Uploaded");
                } else {
                    response.send(statusCodes.OK, "Error!");
                }
            });
        } catch (er) {
            response.statusCode = 400;
            return res.end('error: ' + er.message);
        }
    });
}
Below is our client to upload a file.
private static void sendPOST() throws IOException {
    URL obj = new URL("https://crowdtest-fileservice.azure-mobile.net/api/files_stage/");
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("sharename", "newamactashare");
    con.setRequestProperty("directorypath", "MaheshApp/TestLibrary/");
    con.setRequestProperty("filename", "temp.txt");

    Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
    byte[] data = Files.readAllBytes(path);

    // For POST only - START
    con.setDoOutput(true);
    OutputStream os = con.getOutputStream();
    os.write(data);
    os.flush();
    os.close();
    // For POST only - END

    int responseCode = con.getResponseCode();
    System.out.println("POST Response Code :: " + responseCode);

    if (responseCode == HttpURLConnection.HTTP_OK) { // success
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        String inputLine;
        StringBuffer response = new StringBuffer();
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
            System.out.println(inputLine);
        }
        in.close();

        // print result
        System.out.println(response.toString());
    } else {
        BufferedReader br = new BufferedReader(new InputStreamReader(con.getErrorStream()));
        String line = "";
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        System.out.println("POST request not worked");
    }
}
It is showing the error
The request 'POST /api/files_stage/' has timed out. This could be
caused by a script that fails to write to the response, or otherwise
fails to return from an asynchronous call in a timely manner.
Updated:
I have also tried the code below.
var body = new Object();
body = request.body;
var length = body.length;

console.log(request.body);
console.log(body);
console.log(length);

try {
    fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
        if (!error) {
            // file uploaded
            response.send(statusCodes.OK, "File Uploaded");
        } else {
            response.send(statusCodes.OK, "Error!");
        }
    });
} catch (ex) {
    response.send(500, { error: ex.message });
}
But I am facing the issue:
{"error":"Parameter stream for function createFileFromStream should be
an object"}
I am new to node.js. Please help me to fix this.
There are several issues here. Let us go over them one by one.
1. In your Java client you cannot just dump the binary data into an Azure mobile service connection.
The reason for this is that an Azure mobile service has two body parsers that ensure that no matter what, the request body is parsed for you.
So, while you can get around the Express body parser by specifying an uncommon content type, you will still hit the Azure body parser, which will mess up your data stream by naively assuming that it is a UTF-8 string.
The only option, therefore, is to skip the Express parser by specifying a content type it cannot handle, and then play along with the Azure parser by Base64-encoding your binary data.
So, in the Java client replace
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
with
con.setRequestProperty("content-type", "binary");
Path path = Paths.get("C:/Users/uma.maheshwaran/Desktop/Temp.txt");
byte[] data = Files.readAllBytes(path);
data = Base64.getEncoder().encode(data);
If you are not on Java 8, replace the java.util.Base64 encoder with any other Base64 encoder you have access to.
2. The createFileFromStream Azure storage API function you are trying to use expects a stream.
At the same time, the best you can get when parsing a request body manually is a byte array. Unfortunately, Azure mobile services use NodeJS version 0.8, which means there is no easy way to construct a readable stream from a byte array, and you will have to assemble your own stream suitable for the Azure storage API. Some duct tape and stream#0.0.1 should do just fine.
var base64 = require('base64-js'),
    Stream = require('stream'),
    fileService = require('azure-storage')
        .createFileService('yourStorageAccount', 'yourStoragePassword');

exports.post = function (req, res) {
    var data = base64.toByteArray(req.body),
        buffer = new Buffer(data),
        stream = new Stream();

    stream['_ended'] = false;
    stream['pause'] = function() {
        stream['_paused'] = true;
    };
    stream['resume'] = function() {
        if(stream['_paused'] && !stream['_ended']) {
            stream.emit('data', buffer);
            stream['_ended'] = true;
            stream.emit('end');
        }
    };

    try {
        fileService.createFileFromStream(req.headers.sharename, req.headers.directorypath,
            req.headers.filename, stream, data.length, function (error, result, resp) {
                res.statusCode = error ? 500 : 200;
                res.end();
            }
        );
    } catch (e) {
        res.statusCode = 500;
        res.end();
    }
};
These are the dependencies you need for this sample.
"dependencies": {
"azure-storage": "^0.7.0",
"base64-js": "^0.0.8",
"stream": "0.0.1"
}
If specifying them in your service's package.json does not work you can always go to this link and install them manually via the console.
cd site\wwwroot
npm install azure-storage
npm install base64-js
npm install stream#0.0.1
3. To increase the default upload limit of 1Mb, specify MS_MaxRequestBodySizeKB for your service.
Do keep in mind though that since you are transferring your data Base64-encoded, you have to account for this overhead. So, to support uploading files up to 20Mb in size, you have to set MS_MaxRequestBodySizeKB to roughly 20 * 1024 * 4 / 3 = 27307.
I find the easiest way is to use pkgcloud which abstracts the differences between cloud providers and also provides a clean interface for uploading and downloading files. It uses streams so the implementation is memory efficient as well.
var pkgcloud = require('pkgcloud');
var fs = require('fs');

var client = pkgcloud.storage.createClient({
    provider: 'azure',
    storageAccount: 'your-storage-account',
    storageAccessKey: 'your-access-key'
});

var readStream = fs.createReadStream('a-file.txt');
var writeStream = client.upload({
    container: 'your-storage-container',
    remote: 'remote-file-name.txt'
});

writeStream.on('error', function (err) {
    // handle your error case
});

writeStream.on('success', function (file) {
    // success, file will be a File model
});

readStream.pipe(writeStream);
We can leverage this answer from the SO thread How to send an image from Android client to Node.js server via HttpUrlConnection?, which creates a custom middleware to read the uploaded file content into a buffer; we can then use createFileFromText() to store the file in Azure Storage.
Here is the code snippet:
function rawBody(req, res, next) {
    var chunks = [];

    req.on('data', function (chunk) {
        chunks.push(chunk);
    });

    req.on('end', function () {
        var buffer = Buffer.concat(chunks);
        req.bodyLength = buffer.length;
        req.rawBody = buffer;
        next();
    });

    req.on('error', function (err) {
        console.log(err);
        res.status(500);
    });
}

router.post('/upload', rawBody, function (req, res) {
    fileService.createShareIfNotExists('taskshare', function (error, result, response) {
        if (!error) {
            // if result = true, share was created.
            // if result = false, share already existed.
            fileService.createDirectoryIfNotExists('taskshare', 'taskdirectory', function (error, result, response) {
                if (!error) {
                    // if result = true, directory was created.
                    // if result = false, directory already existed.
                    try {
                        fileService.createFileFromText('taskshare', 'taskdirectory', 'test.txt', req.rawBody, function (error, result, resp) {
                            if (!error) {
                                // file uploaded
                                res.send(200, "File Uploaded");
                            } else {
                                res.send(200, "Error!");
                            }
                        });
                    } catch (ex) {
                        res.send(500, { error: ex.message });
                    }
                }
            });
        }
    });
});

router.get('/getfile', function (req, res) {
    fileService.createReadStream('taskshare', 'taskdirectory', 'test.txt').pipe(res);
});
When the request arrives at the function defined in exports.post, the whole request is already there, so you don't need to buffer it. You can simplify it by writing something along the lines of the code below.
exports.post = function(request, response) {
    var shareName = request.headers.sharename;
    var dirPath = request.headers.directorypath;
    var fileName = request.headers.filename;
    var body = request.body;
    var length = body.length;

    console.log(length);

    try {
        fileService.createFileFromText(shareName, dirPath, fileName, body, function(error, result, resp) {
            if (!error) {
                // file uploaded
                response.send(statusCodes.OK, "File Uploaded");
            } else {
                response.send(statusCodes.OK, "Error!");
            }
        });
    } catch (ex) {
        response.send(500, { error: ex.message });
    }
}
There are several things:
1. createFileFromText can work with plain text, but it will fail for binary content, as it uses UTF-8 encoding.
You might want to refer to the similar issue for blob at: Saving blob (might be data!) returned by AJAX call to Azure Blob Storage creates corrupt image
2. The createFileFromStream or createWriteStreamToExistingFile / createWriteStreamToNewFile Azure storage APIs may be the functions that can help.
Please note that these APIs expect streams. You need to convert the buffer/string in the request body into a stream. You can refer to How to wrap a buffer as a stream2 Readable stream?
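A minimal sketch of that wrapping, assuming a Node version with the stream2 Readable API and that data is a Buffer holding the request body:

var Readable = require('stream').Readable;

var requestStream = new Readable();
requestStream.push(data);  // enqueue the whole buffer
requestStream.push(null);  // signal end-of-stream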
For createFileFromStream :
fileService.createFileFromStream(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    requestStream,
    data.length,
    function (error, result, resp) {
        res.statusCode = error ? 500 : 200;
        res.end();
    }
);
For createWriteStreamToNewFile :
var writeStream = fileService.createWriteStreamToNewFile(req.headers.sharename,
    req.headers.directorypath,
    req.headers.filename,
    data.length);

requestStream.pipe(writeStream);
3. There are several issues in your code
console.log(body); // This giving the result as undefined
The reason is that you declare var body without initializing it, so it starts out undefined, and concatenating chunks onto it with body += chunk does not give you the raw bytes you need (a correct accumulation pattern is sketched at the end of this answer).
fileService.createFileFromStream(shareName, dirPath, fileName, body, length, function(error, result, resp) {
    if (!error) {
        // file uploaded
        response.send(statusCodes.OK, "File Uploaded");
    } else {
        response.send(statusCodes.OK, "Error!");
    }
});
When an error happens in createFileFromStream, it could also be an error in the network transfer, so you might want to return an error status code instead of statusCodes.OK.
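For completeness, a minimal sketch of collecting the raw body into a Buffer, assuming the body parsers are bypassed so the data events actually fire:

var chunks = [];

request.on('data', function (chunk) {
    chunks.push(chunk); // each chunk is a Buffer
});

request.on('end', function () {
    var body = Buffer.concat(chunks); // a single Buffer, not a string
    // body (or a stream wrapped around it) can now be handed to the storage API
});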

Node.js express + busboy file type check

I'm trying to implement a file upload in Node.js 4.x using Express with busboy.
I'm already able to upload files and store them in Azure Blob Storage.
Now I'd like to verify the file type before storing it to Azure, and reject any file which is not valid.
I'd like to do the validation using magic numbers. I found
const fileType = require('file-type');, which determines the file type for me.
Now I'm trying to get this work as efficient as possible, but here is where I'm struggling:
I want to directly pipe the file stream to Azure. But before that, I need to read the first 5 bytes from the stream into a buffer, which is processed by file-type.
Reading from the stream and then piping to Azure surely does not work. After some research I found a solution: piping the file into 2 PassThrough streams. But now I'm struggling to handle those 2 streams correctly.
const fileType = require('file-type');
const pass = require('stream').PassThrough;
//...

req.busboy.on('file', function (fieldname, file, filename) {
    console.log("Uploading: " + filename);

    var b = new pass;
    var c = new pass;
    file.pipe(b);
    file.pipe(c);

    var type = null;

    b.on('readable', function() {
        b.pause();
        if(type === null) {
            var chunk = b.read(5);
            type = fileType(chunk) || false;
            b.end();
        }
    });

    b.on('finish', function() {
        if(type && ['jpg', 'png', 'gif'].indexOf(type.ext) !== -1) {
            var blobStream = blobSvc.createWriteStreamToBlockBlob(storageName,
                blobName,
                function (error) {
                    if (error) console.log('blob upload error', error);
                    else console.log('blob upload complete')
                });
            c.pipe(blobStream);
        }
        else {
            console.error("Rejected file of type " + type);
        }
    });
});
This solution sometimes works - and sometimes there is some "write after end" error.
Also, I think the streams are not properly closed, because normally, after a request, express logs something like this on console:
POST /path - - ms - -
But this log message now comes like 30s-60s after "blob upload complete", probably due to some timeout.
Any idea how to fix this?
You don't need to add additional streams into the mix. Just unshift() the consumed portion back onto the stream. For example:
const fileType = require('file-type');

req.busboy.on('file', function (fieldname, file, filename) {
    function readFirstBytes() {
        var chunk = file.read(5);
        if (!chunk)
            return file.once('readable', readFirstBytes);

        var type = fileType(chunk);
        if (type.ext === 'jpg' || type.ext === 'png' || type.ext === 'gif') {
            const blobStream = blobSvc.createWriteStreamToBlockBlob(
                storageName,
                blobName,
                function (error) {
                    if (error)
                        console.log('blob upload error', error);
                    else
                        console.log('blob upload complete');
                }
            );
            file.unshift(chunk);
            file.pipe(blobStream);
        } else {
            console.error('Rejected file of type ' + type);
            file.resume(); // Drain file stream to continue processing form
        }
    }
    readFirstBytes();
});
