Firebase Storage upload file from URL with the admin SDK [duplicate] - node.js

I'm trying to find a way to upload a PDF file, generated by a PHP/MySQL server, to my Google Storage bucket. The URL is simple: www.my_domain.com/file.pdf. I tried the code below, but I'm having trouble making it work. The error is: path (fs.createWriteStream(destination)) must be a string or Buffer. Thanks in advance for your help!
const http = require('http');
const fs = require('fs');
const { Storage } = require('@google-cloud/storage');

const gcs = new Storage({
  keyFilename: 'my_keyfile.json'
});
const bucket = gcs.bucket('my_bucket.appspot.com');
const destination = bucket.file('file.pdf');
var theURL = 'https://www.my_domain.com/file.pdf';

var download = function() {
  // This line throws: destination is a Cloud Storage File object,
  // but fs.createWriteStream() expects a local path string.
  var file = fs.createWriteStream(destination);
  var request = http.get(theURL, function(response) {
    response.pipe(file);
    file.on('finish', function() {
      console.log("File uploaded to Storage");
      file.close();
    });
  });
};

I finally found a solution: fs.createWriteStream() needs a local filesystem path, so I download to a temp file under os.tmpdir() first and then upload that file with bucket.upload():
const https = require('https');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { Storage } = require('@google-cloud/storage');

const gcs = new Storage({
  keyFilename: 'my_keyfile.json'
});
const bucket = gcs.bucket('my_bucket.appspot.com');
// Write to a local temp file first; bucket.upload() takes a path.
const destination = path.join(os.tmpdir(), 'file.pdf');
var theURL = 'https://www.my_domain.com/file.pdf';

var download = function () {
  var request = https.get(theURL, function (response) {
    if (response.statusCode === 200) {
      var file = fs.createWriteStream(destination);
      response.pipe(file);
      file.on('finish', function () {
        console.log('Pipe OK');
        file.close();
        bucket.upload(destination, {
          destination: "file.pdf"
        }, (err, file) => {
          if (err) return console.error(err);
          console.log('File OK on Storage');
        });
      });
    }
  });
};

firebase-admin, as of v7.0.0, uses google-cloud/storage v2.3.0, which can no longer accept file URLs on bucket.upload.
I figured I would share my solution as well.
const rq = require('request');

// filePath = file location on the Google Storage bucket
// fileUrl = URL of the remote file
const bucketFile = bucket.file(filePath);
const fileWriteStream = bucketFile.createWriteStream();
let rqPipe = rq(fileUrl).pipe(fileWriteStream);

// And if you want the file to be publicly readable:
rqPipe.on('finish', async () => {
  await bucketFile.makePublic();
});
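The request package has since been deprecated, so here is a minimal sketch of the same streaming approach using Node's built-in https module (assuming, as above, an initialized bucket, a filePath destination, and an https fileUrl):

const https = require('https');

const bucketFile = bucket.file(filePath);
const fileWriteStream = bucketFile.createWriteStream();

// Pipe the remote file straight into Cloud Storage; no temp file needed.
https.get(fileUrl, (response) => {
  response.pipe(fileWriteStream);
  fileWriteStream.on('finish', async () => {
    // Optionally make the uploaded file publicly readable.
    await bucketFile.makePublic();
  });
});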

Related

How to perform an HTTP post request using express on Cloud Functions for Firebase using busboy

Hi, I am trying to insert data into the database using a POST request, but the data is not being inserted.
On further investigation I found that to upload form-data in Firebase Functions, busboy needs to be used for the image upload, but I have not been able to find a solution for using busboy with the POST method.
I'd appreciate help resolving this issue.
Below is the code for reference.
app.js
const express = require('express');
const router = new express.Router();
const userInfo = require('../models/userProfile');
const multer = require('multer');
var fs = require('fs');
var path = require('path');
var JSONStream = require('JSONStream');
const planCheck = require('../models/planDetails');
const login = require('../models/login_creditionals');
const { Storage } = require('@google-cloud/storage');
const { format } = require('util');
const busboy = require('busboy');

const storage = new Storage({
  projectId: "biz-1",
  keyFilename: "/Users/akm/pixNodes/functions/pixbiz-65a402.json"
});
const bucket = storage.bucket("gs://biz-1");

const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 10 * 1024 * 1024
  }
});

const uploadImageToStorage = (file) => {
  return new Promise((resolve, reject) => {
    if (!file) {
      reject('No image file');
    }
    let newFileName = `${Date.now() + path.extname(file.originalname)}`;
    let fileUpload = bucket.file(newFileName);
    const blobStream = fileUpload.createWriteStream({
      metadata: {
        contentType: file.mimetype
      }
    });
    blobStream.on('error', (error) => {
      reject('Something is wrong! Unable to upload at the moment.');
    });
    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const url = format(`https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`);
      resolve(url);
    });
    blobStream.end(file.buffer);
  });
};

router.post('/userprofile/check', upload.single('profile_pic'), async (req, res) => {
  var reqFiles;
  var reqFilesUrl;
  reqFiles = req.file;
  if (reqFiles) {
    const imagerUrl = await uploadImageToStorage(reqFiles);
    reqFilesUrl = imagerUrl;
    console.log(reqFilesUrl);
    const notify = new userInfo({
      userId: req.body.userId,
      mobile_number: req.body.mobileNumber,
      profile_pic: reqFilesUrl
    });
    try {
      console.log('success insert data');
      await notify.save((err, post) => {
        if (err) {
          console.log(err);
        }
        res.status(201).send(post);
      });
      console.log('201');
    } catch (e) {
      return res.send(e);
    }
  }
});
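Note that the code above requires busboy but never uses it; everything goes through multer's memory storage, which typically fails inside Cloud Functions because the platform has already consumed the request stream. A minimal sketch of the busboy pattern the Firebase docs recommend for an HTTPS function (a hedged illustration: busboy v1.x is assumed, uploadProfilePic and the field names are hypothetical, and bucket is the Storage bucket defined above):

const busboy = require('busboy');

exports.uploadProfilePic = functions.https.onRequest((req, res) => {
  const bb = busboy({ headers: req.headers });
  const uploads = [];
  const fields = {};

  bb.on('field', (name, value) => {
    fields[name] = value;
  });

  bb.on('file', (name, file, info) => {
    const { filename, mimeType } = info;
    const gcsFile = bucket.file(`${Date.now()}_${filename}`);
    // Pipe the incoming file stream straight into Cloud Storage.
    uploads.push(new Promise((resolve, reject) => {
      file.pipe(gcsFile.createWriteStream({ metadata: { contentType: mimeType } }))
        .on('finish', resolve)
        .on('error', reject);
    }));
  });

  bb.on('close', async () => {
    await Promise.all(uploads);
    res.status(201).send(fields);
  });

  // Cloud Functions has already buffered the request body; hand it to busboy.
  bb.end(req.rawBody);
});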

google cloud function not uploading to bucket but no error in function

I have a NodeJS function that writes several small SVG files locally and then attempts to upload those files to a Cloud Storage bucket.
In the function log, I only see the message that the file was written to local disk and is about to be uploaded. But there is no file in the bucket and no error logged anywhere. I have made sure the timeout is set to 9 minutes (the max), so I am sure it's not timing out. What else should I check?
Any pointers will be appreciated.
exports.createQRCode = functions.storage.object().onFinalize(async (object) => {
  const qrcodeMonkeyKey = functions.config().qrcodemonkey.key;
  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const contentType = object.contentType; // File content type.
  const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
  console.log(fileBucket);
  console.log(filePath);
  if (!filePath.toLowerCase().endsWith('.csv'))
    return console.log('not a csv so no need to do anything fancy');
  const bucket = admin.storage().bucket(fileBucket);
  const filePathComps = filePath.split('/');
  const folderName = filePathComps[filePathComps.length - 3];
  if (folderName !== "qrcode")
    return console.log('not a qr code csv so no need to do anything fancy');
  const fileName = filePathComps[filePathComps.length - 1];
  console.log(fileName);
  const path = require('path');
  const os = require('os');
  const fs = require('fs');
  const tempFilePath = path.join(os.tmpdir(), fileName);
  const metadata = {
    contentType: contentType,
  };
  await bucket.file(filePath).download({ destination: tempFilePath });
  const csv = require('csv-parser');
  const results = [];
  fs.createReadStream(tempFilePath)
    .pipe(csv({
      headers: ['uri', 'filename', 'foldername'],
      skipLines: 1
    }))
    .on('data', (data) => {
      results.push(data);
    })
    .on('end', async () => {
      const pArray = [];
      results.forEach(x => {
        pArray.push(createQRCodeAndUpload(qrcodeMonkeyKey, x.filename, x.uri, x.foldername));
      });
      const finaloutput = await Promise.all(pArray);
      console.log(JSON.stringify(finaloutput));
      return;
    });
});

const createQRCodeAndUpload = async (qrcodeMonkeyKey, fileName, url, foldername) => {
  const bucket = admin.storage().bucket('vmallapp.appspot.com');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');
  var axios = require("axios").default;
  console.log('processing ' + url);
  if (url !== "") {
    const dataToSend = {
      data: url,
      config: {
        body: 'circle',
        eye: 'frame14',
        eyeBall: 'ball16',
        bodyColor: "#032b5c",
        bgColor: "#84d4e2",
        logo: "ae600e1267b9e477f0b635b60ffaec1d1c18d93b.png"
      },
      size: 1200,
      download: false,
      file: 'svg',
      gradientOnEyes: true
    };
    var options = {
      method: 'POST',
      url: 'https://qrcode-monkey.p.rapidapi.com/qr/custom',
      headers: {
        'content-type': 'application/json',
        'x-rapidapi-host': 'qrcode-monkey.p.rapidapi.com',
        'x-rapidapi-key': qrcodeMonkeyKey
      },
      data: dataToSend
    };
    var response = await axios.request(options);
    console.log('qrcode monkey returned status ' + response.status);
    const outputFilePath = path.join(os.tmpdir(), `${fileName}.svg`);
    fs.writeFileSync(outputFilePath, response.data);
    console.log(`${fileName}.svg written to local disk. now will upload`);
    try {
      await bucket.upload(outputFilePath, {
        destination: `qrcode/output/${fileName}.svg`
      });
    } catch (error) {
      console.log('error in uploading ' + error);
    }
    console.log('lets delete the file now and clean up local storage');
    fs.unlinkSync(outputFilePath);
    return 'all done';
  }
};
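One thing worth checking, as a hedged guess rather than a confirmed fix: onFinalize tears the instance down once the returned promise settles, and nothing in the handler above awaits the csv stream's end callback, so the uploads can be cut off mid-flight without any error being logged. A minimal sketch of wrapping the stream in a Promise that the handler returns (reusing the names from the code above):

// Inside the onFinalize handler, replace the bare stream with a returned
// Promise so the function stays alive until every upload settles.
return new Promise((resolve, reject) => {
  fs.createReadStream(tempFilePath)
    .pipe(csv({ headers: ['uri', 'filename', 'foldername'], skipLines: 1 }))
    .on('data', (data) => results.push(data))
    .on('error', reject)
    .on('end', async () => {
      try {
        const pArray = results.map(x =>
          createQRCodeAndUpload(qrcodeMonkeyKey, x.filename, x.uri, x.foldername));
        console.log(JSON.stringify(await Promise.all(pArray)));
        resolve(null);
      } catch (err) {
        reject(err);
      }
    });
});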

How to upload images and files to Azure Blob Node.js

I have a Node.js application with an Angular frontend.
I need to upload files and images to Azure Blob Storage.
I have created a container and set up the environment according to the MS documentation (https://learn.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-nodejs), the v12 version.
My function works for creating a file and uploading it to Azure Blob, but I could not figure out how to upload a file posted from the client to Azure Blob. Below is my code in Node.js TypeScript.
import * as formidable from 'formidable';
import * as fs from 'fs';
const { BlobServiceClient } = require('@azure/storage-blob');
const uuidv1 = require('uuid/v1');
const dotenv = require('dotenv');
dotenv.config();

class BlobController {
  private AZURE_STORAGE_CONNECTION_STRING = process.env.CONSTRINGBlob;

  constructor(router) {
    router.post('/file', this.uploadFile.bind(this));
  }

  async uploadFile(req, res) {
    const blobServiceClient = await BlobServiceClient.fromConnectionString(this.AZURE_STORAGE_CONNECTION_STRING);
    // Get a reference to a container
    const containerClient = blobServiceClient.getContainerClient('mycontainer');
    console.log('\t', containerClient.containerName);
    let form = new formidable.IncomingForm();
    form.parse(req, async function (err, fields, files) {
      const blobName = 'test' + uuidv1() + files.file;
      // Get a block blob client
      const blockBlobClient = containerClient.getBlockBlobClient(blobName);
      console.log('\nUploading to Azure storage as blob:\n\t', blobName);
      // Upload data to the blob
      const data = 'Hello test';
      const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
      console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse.requestId);
    });
  }
}
module.exports = BlobController;
Could anyone help me with how I can upload posted files to Azure Blob using Node.js?
You were almost there :).
Please change the following code:
form.parse(req, async function (err, fields, files) {
  const blobName = 'test' + uuidv1() + files.file;
  // Get a block blob client
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  console.log('\nUploading to Azure storage as blob:\n\t', blobName);
  // Upload data to the blob
  const data = 'Hello test';
  const uploadBlobResponse = await blockBlobClient.upload(data, data.length);
  console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse.requestId);
});
to:
form.parse(req, async function (err, fields, files) {
  const file = files.file;
  const blobName = 'test' + uuidv1() + file.name;
  const contentType = file.type;
  const filePath = file.path; // This is where you get the file path.
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  const uploadBlobResponse = await blockBlobClient.uploadFile(filePath);
});
// Here's my form.parse code I used to upload pictures.
// (resolve/reject come from an enclosing `new Promise(...)` wrapper.)
form.parse(req, async (err: any, fields: any, files: any) => {
  const file = files.file;
  const filePath = file.path; // This is where you get the file path. (this is the file itself)
  const blobName: string = slugify(file.name);
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  const uploadBlobResponse = await blockBlobClient.uploadFile(filePath);
  console.log("Blob was uploaded successfully. requestId: ", uploadBlobResponse);
  if (err) return reject(err);
  // write to DB
  // end write to DB
  resolve(fields);
});
For anyone trying to make use of streams, this worked for me:
import formidable from 'formidable';
import { PassThrough } from 'stream';
import type { NextApiRequest, NextApiResponse } from 'next';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method == 'POST') {
    const stream = new PassThrough();
    const form = new formidable.IncomingForm({
      fileWriteStreamHandler: () => {
        return stream;
      }
    });
    form.parse(req, (err, fields, files) => {
      if (files) {
        if (files['<form-file-input-name>']) {
          const file = files['<form-file-input-name>'] as formidable.File;
          const mimeType = file.mimetype;
          const extension = file.originalFilename ? file.originalFilename.substring(file.originalFilename.lastIndexOf('.')) : '.csv';
          const newFileName = `<form-file-input-name>-${new Date().toISOString()}${extension}`;
          getFilesBlobContainer().getBlockBlobClient(newFileName).uploadStream(stream, undefined, undefined, {
            blobHTTPHeaders: {
              blobContentType: mimeType,
            },
          });
        }
      }
    });
    return res.status(200).end();
  }
}

export const config = {
  api: {
    bodyParser: false, // Disable Next.js body parsing so formidable can do that itself (fails silently otherwise).
  },
};

How to chain writeFile() and OCR with NodeJS in Google Cloud Functions?

The scenario is as follows:
A file is fetched from an Amazon S3 bucket, stored in a temporary folder, and then Optical Character Recognition is to be performed on it using the Vision API.
Unfortunately, this doesn't work. I think it's due to asynchronous/synchronous execution, but I've already tried several variants with callbacks/promises and didn't get any further.
If someone can give me a hint on how to construct this scenario I would be grateful!
The current error is:
TypeError: Cannot read property 'writeFile' of undefined at Response.<anonymous> (/srv/index.js:38:32) (it's the 'await fs.writeFile(dir,data);' line)
/**
 * Responds to any HTTP request.
 *
 * @param {!express:Request} req HTTP request context.
 * @param {!express:Response} res HTTP response context.
 */
const AWS = require('aws-sdk');
const fs = require('fs').promises;
const Vision = require('@google-cloud/vision');
var os = require('os');

exports.helloWorld = async (req, res) => {
  var bucket, fileName, fileUrl;
  req.on('data', chunk => {
    body += chunk.toString();
    data.push(chunk);
  });
  req.on('end', () => {
    bucket = JSON.parse(data).value1;
    fileName = JSON.parse(data).value2;
    fileUrl = JSON.parse(data).value3;
    var s3 = new AWS.S3();
    s3.getObject({
      Bucket: bucket,
      Key: fileName
    },
    async function (error, data) {
      if (error != null) {
        console.log("Failed to retrieve an object: " + error);
      } else {
        console.log("Loaded " + data.ContentType + " bytes");
        var tmpdir = os.tmpdir();
        var dir = tmpdir + '/' + fileName;
        try {
          await fs.writeFile(dir, data);
          const vision = new Vision.ImageAnnotatorClient();
          let text;
          await vision
            .textDetection('/tmp/' + fileName)
            .then(([detections]) => {
              const annotation = detections.textAnnotations[0];
              console.log(1);
              text = annotation ? annotation.description : '';
              console.log(`Extracted text from image (${text.length} chars)`);
              console.log(1);
              console.log(text);
              resolve("Finished ocr successfully");
            })
            .catch(error => {
              console.log(error);
              reject("Error with OCR");
            });
        } catch (error) {
          console.log(error);
        }
      }
    });
    let message = bucket + fileName + fileUrl;
    res.status(200).send(message);
  });
};
You're getting that error because you're running an older version of Node (< 10.0.0), where fs.promises is not available. That's why fs is undefined, and you're getting:
TypeError: Cannot read property 'writeFile' of undefined at Response.<anonymous> (/srv/index.js:38:32) (it's the 'await fs.writeFile(dir,data);' line)
Either use a newer version, or just promisify the code.
const { promisify } = require('util');
const fs = require('fs');
// const fs = require('fs').promises
const writeFile = promisify(fs.writeFile);
And now use writeFile instead of fs.writeFile in your code.
Aside from that, there are a few issues with your code.
req.on('data', chunk => {
  body += chunk.toString();
  data.push(chunk);
});
data is not defined anywhere, and it doesn't make sense to push chunks into an array and then run JSON.parse on that array, given the next few lines.
bucket = JSON.parse(data).value1;
fileName = JSON.parse(data).value2;
fileUrl = JSON.parse(data).value3;
Furthermore, JSON.parse should be called only once, instead of parsing the same string (which is actually an array in your code, and will yield an error) three times.
const values = JSON.parse(body); // should be body instead of data with the posted code
bucket = values.value1;
fileName = values.value2;
fileUrl = values.value3;
This can be improved greatly by just posting bucket, fileName & fileUrl in the JSON instead of valueN.
const { bucket, fileName, fileUrl } = JSON.parse(body);
The whole code can be rewritten into:
const AWS = require('aws-sdk');
const { promisify } = require('util');
const fs = require('fs');
const Vision = require('@google-cloud/vision');
const os = require('os');
const path = require('path');
const writeFile = promisify(fs.writeFile);

exports.helloWorld = async (req, res) => {
  let body = '';
  req.on('data', chunk => {
    body += chunk.toString();
  });
  req.on('end', async () => {
    // post { "bucket": "x", "fileName": "x", "fileUrl": "x" }
    const { bucket, fileName, fileUrl } = JSON.parse(body);
    var s3 = new AWS.S3();
    try {
      const data = await s3.getObject({
        Bucket: bucket,
        Key: fileName
      }).promise();
      const tmpdir = os.tmpdir();
      const filePath = path.join(tmpdir, fileName);
      await writeFile(filePath, data.Body); // the object's contents are in data.Body
      const vision = new Vision.ImageAnnotatorClient();
      const [detections] = await vision.textDetection(filePath);
      const annotation = detections.textAnnotations[0];
      const text = annotation ? annotation.description : '';
      console.log(`Extracted text from image (${text.length} chars)`);
      let message = bucket + fileName + fileUrl;
      res.status(200).send(message);
    } catch (e) {
      console.error(e);
      res.status(500).send(e.message);
    }
  });
};
NOTE: I don't know if Vision API works like this, but I used the same logic and parameters that you're using.

NodeJS How do I Download a file to disk from an aws s3 bucket?

My goal:
Display a dialog box prompting the user to save a file being downloaded from AWS.
My problem:
I am currently using awssum-amazon-s3 to create a download stream. However, I've only managed to save the file to my server or stream it to the command line... As you can see from my code, my last attempt was to manually set the content-disposition headers, which failed. I cannot use res.download() as the headers have already been set.
How can I achieve my goal?
My code for node:
app.post('/dls/:dlKey', function (req, res, next) {
  // download the file via aws s3 here
  var dlKey = req.param('dlKey');
  Dl.findOne({ key: dlKey }, function (err, dl) {
    if (err) return next(err);
    var files = dl.dlFile;
    var options = {
      BucketName: 'xxxx',
      ObjectName: files,
    };
    s3.GetObject(options, { stream: true }, function (err, data) {
      // stream this file to stdout
      fmt.sep();
      data.Headers['Content-Disposition'] = 'attachment';
      console.log(data.Headers);
      data.Stream.pipe(fs.createWriteStream('test.pdf'));
      data.Stream.on('end', function () {
        console.log('File Downloaded!');
      });
    });
  });
  res.end('Successful Download Post!');
});
My code for angular:
$scope.dlComplete = function (dl) {
  $scope.procDownload = true;
  $http({
    method: 'POST',
    url: '/dls/' + dl.dlKey
  }).success(function (data/*, status, headers, config*/) {
    console.log(data);
    $location.path('/#!/success');
  }).error(function (/*data, status, headers, config*/) {
    console.log('File download failed!');
  });
};
The purpose of this code is to let users use a generated key to download a file once.
This is the entire code using streaming on the latest version of aws-sdk
var express = require('express');
var app = express();
var fs = require('fs');

app.get('/', function (req, res, next) {
  res.send('You did not say the magic word');
});

app.get('/s3Proxy', function (req, res, next) {
  // download the file via aws s3 here
  var fileKey = req.query['fileKey'];
  console.log('Trying to download file', fileKey);
  var AWS = require('aws-sdk');
  AWS.config.update({
    accessKeyId: "....",
    secretAccessKey: "...",
    region: 'ap-southeast-1'
  });
  var s3 = new AWS.S3();
  var options = {
    Bucket: 'your-bucket',
    Key: fileKey,
  };
  res.attachment(fileKey);
  var fileStream = s3.getObject(options).createReadStream();
  fileStream.pipe(res);
});

var server = app.listen(3000, function () {
  var host = server.address().address;
  var port = server.address().port;
  console.log('S3 Proxy app listening at http://%s:%s', host, port);
});
This code worked for me with the most recent library:
var s3 = new AWS.S3();
var s3Params = {
  Bucket: 'your bucket',
  Key: 'path/to/the/file.ext'
};
// Note: the callback's second argument must not shadow the express `res`.
s3.getObject(s3Params, function (err, data) {
  if (err === null) {
    res.attachment('file.ext'); // or whatever your logic needs
    res.send(data.Body);
  } else {
    res.status(500).send(err);
  }
});
Simply create a ReadStream from S3 and a WriteStream to the location where you want to download. Find the code below. Works perfectly for me:
var AWS = require('aws-sdk');
var path = require('path');
var fs = require('fs');

AWS.config.loadFromPath(path.resolve(__dirname, 'config.json'));
AWS.config.update({
  accessKeyId: AWS.config.credentials.accessKeyId,
  secretAccessKey: AWS.config.credentials.secretAccessKey,
  region: AWS.config.region
});

var s3 = new AWS.S3();
var params = {
  Bucket: '<your-bucket>',
  Key: '<path-to-your-file>'
};
let readStream = s3.getObject(params).createReadStream();
let writeStream = fs.createWriteStream(path.join(__dirname, 's3data.txt'));
readStream.pipe(writeStream);
You've already figured out what's most important to solve your issue: you can pipe the file stream coming from S3 to any writable stream, be it a file stream… or the response stream that will be sent to the client!
s3.GetObject(options, { stream: true }, function (err, data) {
  res.attachment('test.pdf');
  data.Stream.pipe(res);
});
Note the use of res.attachment that will set the correct headers. You can also check out this answer regarding streams and S3.
Using AWS SDK v3:
npm install @aws-sdk/client-s3
Download code:
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";

const s3Client = new S3Client({ region: "us-east-1" }); // use your bucket's region

/**
 * download a file from AWS and send to your rest client
 */
app.get('/download', async function (req, res, next) {
  var fileKey = req.query['fileKey'];
  var bucketParams = {
    Bucket: 'my-bucket-name',
    Key: fileKey,
  };
  res.attachment(fileKey);
  var fileStream = await s3Client.send(new GetObjectCommand(bucketParams));
  // for TS you can add: if (fileStream.Body instanceof Readable)
  fileStream.Body.pipe(res);
});
For this I use a React frontend and a Node.js backend; on the frontend I use Axios. I used this to download the file when a button is clicked.
==== Node js backend code (AWS S3) ======
// inside a GET method I called this function
public download = (req: Request, res: Response) => {
  const keyName = req.query.keyName as string;
  if (!keyName) {
    throw new Error('key is undefined');
  }
  const downloadParams: AWS.S3.GetObjectRequest = {
    Bucket: this.BUCKET_NAME,
    Key: keyName
  };
  this.s3.getObject(downloadParams, (error, data) => {
    if (error) {
      return error;
    }
    res.send(data.Body);
    res.end();
  });
};
====== React js frontend code ========
// this function handles the download button onClick
const downloadHandler = async (keyName: string) => {
  const response = await axiosInstance.get( // here using axios interceptors
    `papers/paper/download?keyName=${keyName}`, {
      responseType: 'blob', // very important; without it the downloaded file cannot be viewed
    }
  );
  const url = window.URL.createObjectURL(new Blob([response.data]));
  const link = document.createElement("a");
  link.href = url;
  link.setAttribute("download", "file.pdf"); // change "file.pdf" to the saved name you want, with an extension matching the file type
  document.body.appendChild(link);
  link.click();
  link.remove();
};
------ OR (if you are using normal axios and not axios interceptors) -----
axios({
  url: 'http://localhost:5000/static/example.pdf',
  method: 'GET',
  responseType: 'blob', // very important
}).then((response) => {
  const url = window.URL.createObjectURL(new Blob([response.data]));
  const link = document.createElement('a');
  link.href = url;
  link.setAttribute('download', 'file.pdf');
  document.body.appendChild(link);
  link.click();
});
Using express, based on Jushua's answer and https://docs.aws.amazon.com/AmazonS3/latest/userguide/example_s3_GetObject_section.html
import { Readable } from 'stream';

public downloadFeedFile = (req: IFeedUrlRequest, res: Response) => {
  const downloadParams: GetObjectCommandInput = parseS3Url(req.s3FileUrl.replace(/\s/g, ''));
  logger.info("requesting S3 file " + JSON.stringify(downloadParams));
  const run = async () => {
    try {
      const fileStream = await this.s3Client.send(new GetObjectCommand(downloadParams));
      if (fileStream.Body instanceof Readable) {
        fileStream.Body.once('error', err => {
          console.error("Error downloading s3 file");
          console.error(err);
        });
        fileStream.Body.pipe(res);
      }
    } catch (err) {
      logger.error("Error", err);
    }
  };
  run();
};
