Get Full Object From Oracle Object Storage using Node.js

I am working on a Node.js image server that reads and writes images on Oracle Object Storage.
The issue I am having is that I do not get the full image when using the getObject function of the JavaScript API from oci-objectstorage.
I have successfully stored the following images:
a 1x1 image with a size of 70 bytes, and another
5120x3200 image with a size of 2.9 MB.
When I use getObject I am able to retrieve the full 1x1 image, but when I attempt it with the 5120x3200 image, I only get 15 KB of the 2.9 MB.
I used the following example from Oracle
https://github.com/oracle/oci-typescript-sdk/blob/master/examples/javascript/objectstorage.js
Below is the code that I am using to read the image from Oracle Object Storage
I have the below code in an async function.
const fs = require('fs');
const common = require('oci-common');
const os = require('oci-objectstorage');

router.get('/data/', async function (req, res, next) {
    let path = req.query.image_data
    // imagePath is derived from path elsewhere (not shown in the question)
    fs.access(imagePath, fs.F_OK, async (err) => {
        if (err) {
            const provider = new common.ConfigFileAuthenticationDetailsProvider();
            const client = new os.ObjectStorageClient({
                authenticationDetailsProvider: provider
            });
            const compartmentId = config.COMPARTMENTID
            const bucket = config.BUCKET
            const request = {};
            const response = await client.getNamespace(request);
            const namespace = response.value;
            const getObjectRequest = {
                objectName: imagePath,
                bucketName: bucket,
                namespaceName: namespace
            };
            const getObjectResponse = await client.getObject(getObjectRequest);
            // Only the head and tail of the stream's internal buffer are read here
            const head = getObjectResponse.value._readableState.buffer.head.data.toString('base64')
            const tail = getObjectResponse.value._readableState.buffer.tail.data.toString('base64')
            const completeImage = head + tail // concatenated; still only part of the stream for large objects
            fs.writeFile(imagePath, completeImage, {encoding: 'base64'}, function (err) {
                if (err) return
                res.sendFile(path, {root: './imagefiles'}) // using express to serve the image file
            });
        }
        // file exists (note: in the error branch this also runs before the write above has finished)
        res.sendFile(path, {root: './imagefiles'});
    })
})
It seems to me that the head and tail both contain the same data. I then try to write the image using fs.writeFile, which with the large image only writes a small portion of it, while with the small 1x1 image it writes the full image.
I am not sure if this is an issue with my async/await setup, or whether I need a better promise-based implementation that downloads the full image.
Any ideas on how to tackle this?
Another small issue I am having is serving the image after writing it. On the web page I get an error saying the image could not be displayed because it contains errors. But after I refresh the page, which now finds the image on disk, it displays the image without the previous error.
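For what it is worth, the usual way to read the whole object is to consume the response stream rather than reaching into _readableState. Below is a minimal sketch, assuming getObjectResponse.value is a Node Readable stream and reusing imagePath from the code above (the helper name saveObjectToFile is just for illustration):
const fs = require('fs');

// Pipe the whole response body to disk; resolves only after every chunk is flushed.
function saveObjectToFile(getObjectResponse, imagePath) {
    return new Promise((resolve, reject) => {
        const writeStream = fs.createWriteStream(imagePath);
        getObjectResponse.value
            .pipe(writeStream)
            .on('finish', resolve)
            .on('error', reject);
    });
}
Awaiting something like this before calling res.sendFile would also avoid serving a half-written file, which is likely what triggers the "contains errors" message on the first page load.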

Related

How to display images of products stored on aws s3 bucket

I was practicing with this tutorial:
https://www.youtube.com/watch?v=NZElg91l_ms&t=1234s
It works like a charm for me, but the thing is that I am storing product images in the bucket, and if I upload, say, 4 images, they are all uploaded fine.
However, when I display them I get an access denied error, since I am rendering a list and the repeated requests are maybe being detected as spam.
This is how I am trying to fetch them in my React app:
// the rest of the data (product name, price) comes from a MySQL database
// 100+ products
{ products.map((row) => (
    <React.Fragment key={row.imgurl}>
        <div className="product-hero">
            <img src={`http://localhost:3909/images/${row.imgurl}`} alt={row.productName} />
        </div>
        <div className="text-center">{row.productName}</div>
    </React.Fragment>
)) }
As it fetches 100+ products from the database and 100+ images from AWS, it fails.
Sorry for such a detailed question, but in short: how can I fetch all product images from my bucket?
Note that I am aware I can only get one image per call, so how can I get all the images one by one in my scenario?
// download code in my app.js
const express = require('express')
const { uploadFile, getFileStream } = require('./s3')

const app = express()

app.get('/images/:key', (req, res) => {
    console.log(req.params)
    const key = req.params.key
    const readStream = getFileStream(key)
    readStream.pipe(res)
})

// s3 file
const fs = require('fs')
const AWS = require('aws-sdk')

// bucket name and AWS credentials are assumed to come from your config/environment
const bucketName = process.env.AWS_BUCKET_NAME
const s3 = new AWS.S3()

// uploads a file to s3
function uploadFile(file) {
    const fileStream = fs.createReadStream(file.path)
    const uploadParams = {
        Bucket: bucketName,
        Body: fileStream,
        Key: file.filename
    }
    return s3.upload(uploadParams).promise()
}
exports.uploadFile = uploadFile

// downloads a file from s3
function getFileStream(fileKey) {
    const downloadParams = {
        Key: fileKey,
        Bucket: bucketName
    }
    return s3.getObject(downloadParams).createReadStream()
}
exports.getFileStream = getFileStream
It appears that your code is sending image requests to your back-end, which retrieves the objects from Amazon S3 and then serves the images in response to the request.
A much better method would be to have the URLs in the HTML page point directly to the images stored in Amazon S3. This would be highly scalable and will reduce the load on your web server.
This would require the images to be public so that the user's web browser can retrieve the images. The easiest way to do this would be to add a Bucket Policy that grants GetObject access to all users.
Alternatively, if you do not wish to make the bucket public, you can instead generate Amazon S3 pre-signed URLs, which are time-limited URLs that provide temporary access to a private object. Your back-end can generate the pre-signed URL with a couple of lines of code, and the user's web browser will then be able to retrieve private objects from S3 for display on the page.
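For instance, with the AWS SDK for JavaScript (v2) already used above, the back-end could hand out a pre-signed URL instead of streaming the object itself. A minimal sketch, assuming the same s3 client and bucketName as in the question (the /image-urls route is just for illustration):
// Returns a time-limited URL the browser can use to fetch the private object directly from S3.
function getPresignedUrl(fileKey) {
    return s3.getSignedUrl('getObject', {
        Bucket: bucketName,
        Key: fileKey,
        Expires: 60 * 5 // URL stays valid for five minutes
    })
}

app.get('/image-urls/:key', (req, res) => {
    res.json({ url: getPresignedUrl(req.params.key) })
})
The React code would then use that URL as the img src instead of pointing at the Express server.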
I did similar S3 image handling while building my blog's image upload functionality, but I did not use getFileStream() to upload my image.
Because nothing should be done until the image file is fully processed, I used fs.readFile(path, callback) instead to read the data.
My approach produces Buffer data, but AWS S3 is smart enough to interpret it as an image. (I have only added a suffix to my filename; I don't know how to apply image headers...)
This is the relevant part of my code for reference:
fs.readFile(imgPath, (err, data) => {
    if (err) { throw err }
    // Once the file is read, upload to AWS S3
    const objectParams = {
        Bucket: 'yuyuichiu-personal',
        Key: req.file.filename,
        Body: data
    }
    S3.putObject(objectParams, (err, data) => {
        // store image link and read image with link
    })
})
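As an aside on the "image headers" remark: S3 lets you set the object's Content-Type at upload time so that browsers treat the object as an image. A small, hypothetical variation on the snippet above (the helper name uploadImageBuffer is just for illustration):
// Same upload as above, but with an explicit Content-Type so S3 serves the object as an image.
function uploadImageBuffer(S3, bucket, filename, data, mimetype) {
    return S3.putObject({
        Bucket: bucket,
        Key: filename,
        Body: data,
        ContentType: mimetype || 'image/jpeg'
    }).promise()
}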

How can I temporarily store a pdf in Firebase filesystem?

I am creating a pdf using JSPDF on server-side, in NodeJS. Once done, I want to create a new folder for the user in Google Drive, upload the pdf to said folder, and also send it to the client-side (browser) for the user to view.
There are two problems that I'm encountering. Firstly, if I send the pdf in the response (via pdf.output()) the images don't display correctly. They are distorted, as though each row of pixels is offset by some amount. A vertical line "|" instead renders as a diagonal "\". An example is shown below.
(Before and after example images omitted.)
My workaround for this was to instead save it to the filesystem using doc.save() and then send it to the browser using fs.readFileSync(filepath).
However, I've discovered that when running remotely, I don't have file permissions to be saving the pdf and reading it. And after some research and tinkering, I'm thinking that I cannot change these permissions. This is the error I get:
Error: EROFS: read-only file system, open './temp/output.pdf'
at Object.openSync (fs.js:443:3)
at Object.writeFileSync (fs.js:1194:35)
at Object.v.save (/workspace/node_modules/jspdf/dist/jspdf.node.min.js:86:50626)
etc...
So I have this jsPDF object, and I believe I need to either alter the permissions to allow writing/reading, or convert the jsPDF object into a format accepted by Google Drive, such as a stream or buffer object.
The link below leads me to think these permissions can't be altered since it states: "These files are available in a read-only directory".
https://cloud.google.com/functions/docs/concepts/exec#file_system
I also have no idea 'where' the server filesystem is, or how to access it. Thus, I think the best course of action is to look at sending the pdf in different formats.
I've checked jsPDF documentation for types that pdf.output() can return. These include string, arraybuffer, window, blob, jsPDF.
https://rawgit.com/MrRio/jsPDF/master/docs/jsPDF.html#output
My simplified code is as follows:
const express = require('express');
const fs = require('fs');
const { google } = require('googleapis');
const { jsPDF } = require('jspdf');

const app = express();
const credentials = require(credentialsFilepath);
const scopes = [/* scopes in here */];
const auth = new google.auth.JWT(
    credentials.client_email, null,
    credentials.private_key, scopes
);
const drive = google.drive({version: 'v3', auth});
//=========================================================================
app.post('/submit', (req, res) => {
    var pdf = new jsPDF();
    // Set font, fontsize. Added some text, etc.
    pdf.text('blah blah', 10, 10);
    // Add image (signature) from canvas, which is passed as a dataURL
    pdf.addImage(img, 'JPEG', 10, 10, 50, 20);
    pdf.save('./temp/output.pdf');

    drive.files.create({
        resource: folderMetaData,
        fields: 'id'
    })
    .then(response => {
        // Store pdf in newly created folder
        var fileMetaData = {
            'name': 'filename.pdf',
            'parents': [response.data.id],
        };
        var media = {
            mimeType: 'application/pdf',
            body: fs.createReadStream('./temp/output.pdf'),
        };
        drive.files.create({
            resource: fileMetaData,
            media: media,
            fields: 'id'
        }, function(err, file) {
            if (err) {
                console.error('Error:', err);
            } else {
                // I have considered piping 'file' back in the response here but can't figure out how
                console.log('File uploaded');
            }
        });
    })
    .catch(error => {
        console.error('Error:', error);
    });

    // Finally, I attempt to send the pdf to client/browser
    res.setHeader('Content-Type', 'application/pdf');
    res.send(fs.readFileSync('./temp/output.pdf'));
})
Edit: After some more searching, I've found a similar question which explains that the fs module is for reading/writing to local filestore.
EROFS error when executing a File Write function in Firebase
I eventually came to a solution after some further reading. I'm not sure who this will be useful for, but...
It turns out the Firebase filesystem only has one directory you are allowed to write to (the rest are read-only). This directory is /tmp, and I accessed it using the tmp node module [installed with: npm i tmp], since trying to reference the path manually with pdf.save('./tmp/output.pdf') didn't work.
So the only changes to my code were to add in the lines:
var tmp = require('tmp');
var tmpPath = tmp.tmpNameSync();
and then replacing all the instances of './temp/output.pdf' with tmpPath
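Put together, the relevant part of the handler ends up looking roughly like the sketch below. It is based on the code above; express, fs and jsPDF are set up as in the question, and the Drive folder creation and upload stay the same, just reading from tmpPath instead of './temp/output.pdf':
const tmp = require('tmp');

app.post('/submit', (req, res) => {
    var pdf = new jsPDF();
    pdf.text('blah blah', 10, 10);

    // tmp.tmpNameSync() returns a unique path under the writable /tmp directory
    var tmpPath = tmp.tmpNameSync();
    pdf.save(tmpPath);

    // ...create the Drive folder and upload fs.createReadStream(tmpPath) as before...

    res.setHeader('Content-Type', 'application/pdf');
    res.send(fs.readFileSync(tmpPath));
});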

Generating QR codes in serverless with aws-nodejs template

I need to write a Lambda function that takes a number from the API request and generates that many QR codes, storing them in an S3 bucket. I am using the Serverless Framework with the aws-nodejs template.
To describe the task briefly: I get a number in the API request's pathParameters, and based on that number I have to generate that many QR codes using the qr npm package and then store the generated QR codes in the S3 bucket.
This is what I have been able to do so far.
module.exports.CreateQR = (event, context) => {
    const numberOfQR = JSON.parse(event.pathParameters.number);
    for (let i = 0; i < numberOfQR; i++) {
        var d = new Date();
        async function createQr() {
            let unique, cipher, raw, qrbase64;
            unique = randomize('0', 16);
            cipher = key.encrypt(unique);
            raw = { 'version': '1.0', data: cipher, type: 'EC_LOAD' }
            // linkArray.forEach( async (element,index) =>{
            let qrcode = await qr.toDataURL(JSON.stringify(raw));
            console.log(qrcode);
            // fs.writeFileSync('./qr.html', `<img src="${qrcode}">`)
            const params = {
                Bucket: BUCKET_NAME,
                Key: `QR/${d}/img${i + 1}.jpg`,
                Body: qrcode
            };
            s3.upload(params, function (err, data) {
                if (err) {
                    throw err
                }
                console.log(`File uploaded Successfully .${data.Location}`);
            });
        }
        createQr();
    }
};
I have been able to upload the given number of images to the bucket, but the issue I am facing is that the images are not uploaded in order. I think the problem is with the asynchronous code. Any idea how to solve this?
That's because you're not awaiting the S3 upload; instead you're passing a callback.
You should use S3's .promise() and then await it, so you wait for each file to be uploaded before moving on to the next one.
I changed your example code accordingly.
See docs:
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#promise-property
// see the async keyword before the lambda function
// we need it to use the await keyword and wait for a task to complete before continuing
module.exports.CreateQR = async (event, context) => {
    const numberOfQR = JSON.parse(event.pathParameters.number);
    // moved your function out of the loop
    function createQr() {
        // ...
        // here we call .promise and return it so we get a Task back from s3
        return s3.upload(params).promise();
    }
    for (let i = 0; i < numberOfQR; i++) {
        // ....
        // here we await the task, so the images are created and uploaded in order
        await createQr();
    }
};
Hope this guides you towards a solution.
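Combining the question's loop body with this pattern, the handler would look something like the sketch below, reusing the question's variables (qr, key, randomize, s3, BUCKET_NAME), which are assumed to be required and configured at the top of the file:
module.exports.CreateQR = async (event, context) => {
    const numberOfQR = JSON.parse(event.pathParameters.number);
    const d = new Date();

    // Build one QR code and upload it; returns the S3 managed-upload promise.
    async function createQr(i) {
        const unique = randomize('0', 16);
        const cipher = key.encrypt(unique);
        const raw = { version: '1.0', data: cipher, type: 'EC_LOAD' };
        const qrcode = await qr.toDataURL(JSON.stringify(raw));
        const params = {
            Bucket: BUCKET_NAME,
            Key: `QR/${d}/img${i + 1}.jpg`,
            Body: qrcode
        };
        return s3.upload(params).promise();
    }

    for (let i = 0; i < numberOfQR; i++) {
        // Awaiting here keeps the uploads strictly in order.
        await createQr(i);
    }
};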

Uploading data from firebase functions to firebase storage?

I have a website running with Node.js, with the backend running on Firebase Functions. I want to store a bunch of JSON to Firebase Storage. The below snippet works just fine when I'm running on localhost, but when I upload it to Firebase Functions, it says Error: EROFS: read-only file system, open 'export-stock-trades.json'. Anyone know how to get around this?
fs.writeFile(fileNameToReadWrite, JSON.stringify(jsonObjToUploadAsFile), function (err) {
    bucket.upload(fileNameToReadWrite, {
        destination: destinationPath,
    });
    res.send({success: true});
});
I can't tell for sure, since much of the context of your function is missing, but it looks like your function is attempting to write a file to local disk first (fs.writeFile), then upload it (bucket.upload).
On Cloud Functions, code you write only has write access to /tmp, which is os.tmpdir() in node. Read more about that in the documentation:
The only writeable part of the filesystem is the /tmp directory, which you can use to store temporary files in a function instance. This is a local disk mount point known as a "tmpfs" volume in which data written to the volume is stored in memory. Note that it will consume memory resources provisioned for the function.
This is probably what's causing your code to fail.
Incidentally, if the data you want to upload is in memory, you don't have to write it to a file first as you're doing now. You can instead use file.save() for that.
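For example, with the Cloud Storage bucket object you already have, the JSON can be written straight from memory. A minimal sketch, reusing destinationPath and jsonObjToUploadAsFile from the question:
// Writes the JSON string directly to the destination object; no local file is needed.
const file = bucket.file(destinationPath);

file.save(JSON.stringify(jsonObjToUploadAsFile))
    .then(() => res.send({ success: true }))
    .catch(err => res.status(500).send(err));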
Another way I feel this could work is to convert the JSON file into a buffer and then perform an action like the code snippet below. I wrote an article on how you can do this using Google Cloud Storage, but it works fine with Firebase Storage as well. The only thing you need to change is the "service-account-key.json" file.
The link to the article can be found here: Link to article on medium
const { format } = require('util')
const gc = require('./config/')
const bucket = gc.bucket('all-mighti') // should be your bucket name

/**
 * @param { File } file - the file object that will be uploaded
 * @description - This function does the following
 * - It uploads a file to the image bucket on Google Cloud
 * - It accepts an object as an argument with the
 *   "originalname" and "buffer" as keys
 */
export const uploadImage = (file) => new Promise((resolve, reject) => {
    const { originalname, buffer } = file
    const blob = bucket.file(originalname.replace(/ /g, "_"))
    const blobStream = blob.createWriteStream({
        resumable: false
    })
    blobStream.on('finish', () => {
        const publicUrl = format(
            `https://storage.googleapis.com/${bucket.name}/${blob.name}`
        )
        resolve(publicUrl)
    })
    .on('error', () => {
        reject(`Unable to upload image, something went wrong`)
    })
    .end(buffer)
})
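For completeness, uploadImage expects an object with originalname and buffer keys, which is what multer's memory storage provides on req.file. A hypothetical usage sketch (the /upload route and the 'image' field name are just examples):
const express = require('express')
const multer = require('multer')

const app = express()
const upload = multer({ storage: multer.memoryStorage() })

app.post('/upload', upload.single('image'), async (req, res) => {
    try {
        // req.file carries { originalname, buffer } when memory storage is used
        const publicUrl = await uploadImage(req.file)
        res.json({ url: publicUrl })
    } catch (err) {
        res.status(500).send(err)
    }
})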

Uploading image as Binary Data to cognitive Services with Node

I am trying to pass the Microsoft Cognitive services facial API an image which the user has uploaded. The image is available on the server in the uploads folder.
Microsoft is expecting the image to be 'application/octet-stream' and passed as binary data.
I am currently unable to find a way to pass the image to the API in a form it accepts, and I keep receiving "decoding error, image format unsupported". As far as I'm aware the image must be uploaded in blob or file format, but being new to Node.js I'm really unsure how to achieve this.
So far I have the code below and have looked at a few options, but none have worked; the other options I tried returned similar errors such as 'file too small or large', yet when I've manually tested the same image via Postman it works fine.
image.mv('./uploads/' + req.files.image.name, function (err) {
    if (err)
        return res.status(500).send(err);
});

var encodedImage = new Buffer(req.files.image.data, 'binary').toString('hex');

let addAPersonFace = cognitive.addAPersonFace(personGroupId, personId, encodedImage);
addAPersonFace.then(function (data) {
    res.render('pages/persons/face', { data: data, personGroupId: req.params.persongroupid, personId: req.params.personid });
})
The package it looks like you're using, cognitive-services, does not appear to support file uploads. You might choose to raise an issue on the GitHub page.
Alternative NPM packages do exist, though, if that's an option. With project-oxford, you would do something like the following:
var oxford = require('project-oxford'),
    client = new oxford.Client(YOUR_FACE_API_KEY),
    uuid = require('uuid');

var personGroupId = uuid.v4();
var personGroupName = 'my-person-group-name';
var personName = 'my-person-name';
var facePath = './images/face.jpg';

// Skip the person-group creation if you already have one
console.log(JSON.stringify({personGroupId: personGroupId}));
client.face.personGroup.create(personGroupId, personGroupName, '')
    .then(function(createPersonGroupResponse) {
        // Skip the person creation if you already have one
        client.face.person.create(personGroupId, personName)
            .then(function(createPersonResponse) {
                console.log(JSON.stringify(createPersonResponse))
                personId = createPersonResponse.personId;
                // Associate an image to the person
                client.face.person.addFace(personGroupId, personId, {path: facePath})
                    .then(function (addFaceResponse) {
                        console.log(JSON.stringify(addFaceResponse));
                    })
            })
    });
Please update to version 0.2.0; this should work now.

Resources