Express JS API req.body shows buffer data - node.js

I created an API below:
app.post("/categories", async (req, res) => {
console.log(`req.body: ${JSON.stringify(req.body)}`)
console.log(`req.body.title: ${JSON.stringify(req.body.title)}`)
console.log(`req.files: ${JSON.stringify(req.files)}`)
res.json({})
});
Where the data passed is:
{
"title": "Video Title"
"description": "Video Description"
"thumbnail": [object File]
"video": [object File]
}
The data passed is powered by VueJS and Axios:
methods: {
async createCategory() {
const formData = new window.FormData();
formData.set("title", this.category.title);
formData.set("description", this.category.description);
formData.set("thumbnail", this.thumbnail);
formData.set("video", this.video);
await this.$axios.post("clothing/v1/categories/", formData, {
headers: { "Content-Type": "multipart/form-data" },
});
}
}
However, the data shown in req.body is:
req.body: {"type":"Buffer","data":[45,45,45,45,45,45,87,101,98,75,105,116,70,111,114,109,66,111,117,110,100,97,114,121,121,104,112,52,54,97,82,89,68,121,77,82,57,66,52,110,13,10,67,111,110,116,101,110,116,45,68,105,115,112,111,115,105,116,105,111,110,58,32,102,111,114,109,45,100,97,116,97,59,32,110,97,109,101,61,34,116,105,116,108,101,34,13,10,13,10,86,105,100,101,111,32,84,105,116,108,101,13,10,45,45,45,45,45,45,87,101,98,75,105,116,70,111,114,109,66,111,117,110,100,97,114,121,121,104,112,52,54,97,82,89,68,121,77,82,57,66,52,110,13,10,67,111,110,116,101,110,116,45,68,105,115,112,111,115,105,116,105,111,110,58,32,102,111,114,109,45,100,97,116,97,59,32,110,97,109,101,61,34,100,101,115,99,114,105,112,116,105,111,110,34,13,10,13,10,86,105,100,101,111,32,68,101,115,99,114,105,112,116,105,111,110,13,10,45,45,45,45,45,45,87,101,98,75,105,116,70,111,114,109,66,111,117,110,100,97,114,121,121,104,112,52,54,97,82,89,68,121,77,82,57,66,52,110,13,10,67,111,110,116,101,110,116,45,68,105,115,112,111,115,105,116,105,111,110,58,32,102,111,114,109,45,100,97,116,97,59,32,110,97,109,101,61,34,116,104,117,109,98,110,97,105,108,34,13,10,13,10,91,111,98,106,101,99,116,32,70,105,108,101,93,13,10,45,45,45,45,45,45,87,101,98,75,105,116,70,111,114,109,66,111,117,110,100,97,114,121,121,104,112,52,54,97,82,89,68,121,77,82,57,66,52,110,13,10,67,111,110,116,101,110,116,45,68,105,115,112,111,115,105,116,105,111,110,58,32,102,111,114,109,45,100,97,116,97,59,32,110,97,109,101,61,34,118,105,100,101,111,34,13,10,13,10,91,111,98,106,101,99,116,32,70,105,108,101,93,13,10,45,45,45,45,45,45,87,101,98,75,105,116,70,111,114,109,66,111,117,110,100,97,114,121,121,104,112,52,54,97,82,89,68,121,77,82,57,66,52,110,45,45,13,10]}
I am hoping to retrieve the passed data inside my API as something like req.body: {"title":"Example","description":"example"}, since I will use this data to save to Firestore and upload the files to Cloud Storage.
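(For reference, the buffer above is just the raw multipart/form-data payload; decoding it to a string makes that clear:)
console.log(req.body.toString("utf8"));
// ------WebKitFormBoundary...
// Content-Disposition: form-data; name="title"
//
// Video Title
// ...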
NOTE:
I tried using multer but got the error below:
> return fn.apply(this, arguments);
> ^
>
> TypeError: Cannot read properties of undefined (reading 'apply')
> at Immediate.<anonymous> (/Users/adminadmin/Desktop/projects/dayanara-environments/dayanara-clothing-api/functions/node_modules/express/lib/router/index.js:641:15)
> at processImmediate (node:internal/timers:468:21)

I did not mention that I was developing Node.js with Google Cloud Functions, and only in local and test development.
The error below always appears whenever there is any kind of error in my code:
> return fn.apply(this, arguments);
> ^
>
> TypeError: Cannot read properties of undefined (reading 'apply')
> at Immediate.<anonymous> (/Users/adminadmin/Desktop/projects/dayanara-environments/dayanara-clothing-api/functions/node_modules/express/lib/router/index.js:641:15)
> at processImmediate (node:internal/timers:468:21)
As for the multipart parsing, I used Busboy as shown below:
app.post("/categories", (req, res) => {
let writeResult;
const storageRef = admin.storage().bucket(`gs://${storageBucket}`);
const busboy = Busboy({headers: req.headers});
const tmpdir = os.tmpdir();
// This object will accumulate all the fields, keyed by their name
const fields = {};
// This object will accumulate all the uploaded files, keyed by their name.
const uploads = {};
// This code will process each non-file field in the form.
busboy.on('field', (fieldname, val) => {
/**
* TODO(developer): Process submitted field values here
*/
console.log(`Processed field ${fieldname}: ${val}.`);
fields[fieldname] = val;
});
const fileWrites = [];
// This code will process each file uploaded.
busboy.on('file', (fieldname, file, {filename}) => {
// Note: os.tmpdir() points to an in-memory file system on GCF
// Thus, any files in it must fit in the instance's memory.
console.log(`Processed file ${filename}`);
const filepath = path.join(tmpdir, filename);
uploads[fieldname] = filepath;
const writeStream = fs.createWriteStream(filepath);
file.pipe(writeStream);
// File was processed by Busboy; wait for it to be written.
// Note: GCF may not persist saved files across invocations.
// Persistent files must be kept in other locations
// (such as Cloud Storage buckets).
const promise = new Promise((resolve, reject) => {
file.on('end', () => {
writeStream.end();
});
writeStream.on('finish', resolve);
writeStream.on('error', reject);
});
fileWrites.push(promise);
});
// Triggered once all uploaded files are processed by Busboy.
// We still need to wait for the disk writes (saves) to complete.
busboy.on('finish', async () => {
console.log('finished busboy')
await Promise.all(fileWrites);
/**
* TODO(developer): Process saved files here
*/
for (const file in uploads) {
const filePath = uploads[file]
const name = fields.name.replaceAll(' ', '-')
const _filePath = filePath.split('/')
const fileName = _filePath[_filePath.length - 1]
const destFileName = `${name}/${fileName}`
// eslint-disable-next-line no-await-in-loop
const uploaded = await storageRef.upload(filePath, {
destination: destFileName
})
const _file = uploaded[0];
const bucketFile = "https://firebasestorage.googleapis.com/v0/b/" + storageBucket + "/o/" + encodeURIComponent(_file.name) + "?alt=media"
fields[file] = bucketFile
}
writeResult = await admin
.firestore()
.collection(collection)
.add({
name: fields.name,
description: fields.description,
timestamp: admin.firestore.Timestamp.now(),
thumbnail: fields.thumbnail,
video: fields.video
});
const written = await writeResult.get();
res.json(written.data());
});
busboy.end(req.rawBody);
});
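For completeness, the handler above assumes roughly these requires and setup at the top of the file (storageBucket and collection are placeholders for my own values):
const os = require("os");
const path = require("path");
const fs = require("fs");
const Busboy = require("busboy");
const admin = require("firebase-admin");

admin.initializeApp();
const storageBucket = "my-project.appspot.com"; // placeholder bucket name
const collection = "categories";                // placeholder Firestore collection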
Then I needed to change how I pass FormData from Vue.js and Axios: I replaced v-model with refs for my file inputs. I only needed to use a model in Django, so I thought it would be the same with Express.js:
methods: {
async createCategory() {
const formData = new window.FormData();
const thumbnail = this.$refs.thumbnail;
const video = this.$refs.video;
formData.set("name", this.category.name);
formData.set("description", this.category.description);
formData.set("thumbnail", thumbnail.files[0]);
formData.set("video", video.files[0]);
await this.$axios.post("clothing/v1/categories/", formData, {
headers: { "Content-Type": "multipart/form-data" },
});
}
}
After the changes above, I can finally send multipart/form-data properly. The resources below helped me a lot:
https://cloud.google.com/functions/docs/samples/functions-http-form-data#functions_http_form_data-nodejs
Handling multipart/form-data POST with Express in Cloud Functions

Related

Delivering image from S3 to React client via Context API and Express server

I'm trying to download a photo from an AWS S3 bucket via an express server to serve to a react app but I'm not having much luck. Here are my (unsuccessful) attempts so far.
The Workflow is as follows:
Client requests photo after retrieving key from database via Context API
Request sent to express server route (important so as to hide the true location from the client)
Express server route requests blob file from AWS S3 bucket
Express server parses image to base64 and serves to client
Client updates state with new image
React Client
const [profilePic, setProfilePic] = useState('');
useEffect(() => {
await actions.getMediaSource(tempPhoto.key)
.then(resp => {
console.log('server resp: ', resp.data.data.newTest) // returns ����\u0000�\u0000\b\u0006\
const url = window.URL || window.webkitURL;
const blobUrl = url.createObjectURL(resp.data.data.newTest);
console.log("blob ", blobUrl);
setProfilePic({ ...profilePic, image : resp.data.data.newTest });
})
.catch(err => errors.push(err));
}
Context API - just axios wrapped into its own library
getMediaContents = async ( key ) => {
return await this.API.call(`http://localhost:5000/${MEDIA}/mediaitem/${key}`, "GET", null, true, this.state.accessToken, null);
}
Express server route
router.get("/mediaitem/:key", async (req, res, next) => {
try{
const { key } = req.params;
// Attempt 1 was to try with s3.getObject(downloadParams).createReadStream();
const readStream = getFileStream(key);
readStream.pipe(res);
// Attempt 2 - attempt to convert response to base 64 encoding
var data = await getFileStream(key);
var test = data.Body.toString("utf-8");
var container = '';
if ( data.Body ) {
container = data.Body.toString("utf-8");
} else {
container = undefined;
}
var buffer = (new Buffer.from(container));
var test = buffer.toString("base64");
require('fs').writeFileSync('../uploads', test); // it never wrote to this directory
console.log('conversion: ', test); // prints: 77+977+977+977+9AO+/vQAIBgYH - this doesn't look like base64 to me.
delete buffer;
res.status(201).json({ newTest: test });
} catch (err){
next(ApiError.internal(`Unexpected error > mediaData/:id GET -> Error: ${err.message}`));
return;
}
});
AWS S3 Library - I made my own library for using the s3 bucket as I'll need to use more functionality later.
const getFileStream = async (fileKey) => {
const downloadParams = {
Key: fileKey,
Bucket: bucketName
}
// This was attempt 1's return without async in the parameter
return s3.getObject(downloadParams).createReadStream();
// Attempt 2's intention was just to wait for the promise to be fulfilled.
return await s3.getObject(downloadParams).promise();
}
exports.getFileStream = getFileStream;
If you've gotten this far you may have realised that I've tried a couple of things from different sources and documentation but I'm not getting any further. I would really appreciate some pointers and advice on what I'm doing wrong and what I could improve on.
If any further information is needed then just let me know.
Thanks in advance for your time!
Maybe it will be useful for you; this is how I get an image from S3 and process it on the server.
Create a temporary directory:
createTmpDir(): Promise<string> {
return mkdtemp(path.join(os.tmpdir(), 'tmp-'));
}
Gets the file
readStream(path: string) {
return this.s3
.getObject({
Bucket: this.awsConfig.bucketName,
Key: path,
})
.createReadStream();
}
How I process the file:
async MainMethod(fileName){
const dir = await this.createTmpDir();
const serverPath = path.join(
dir,
fileName
);
await pipeline(
this.readStream(fileName),
fs.createWriteStream(serverPath + '.jpg')
);
const createFile= await sharp(serverPath + '.jpg')
.jpeg()
.resize({
width: 640,
fit: sharp.fit.inside,
})
.toFile(serverPath + '.jpeg');
const imageBuffer = fs.readFileSync(serverPath + '.jpeg');
//my manipulations
fs.rmSync(dir, { recursive: true, force: true }); //delete temporary folder
}
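Coming back to the original Express route from the question, a minimal sketch of attempt 1 (streaming) would skip the base64 conversion, set a content type, and pipe the S3 stream straight to the response; this assumes getFileStream returns the stream synchronously, as in the first attempt:
router.get("/mediaitem/:key", (req, res, next) => {
  try {
    const { key } = req.params;
    const readStream = getFileStream(key); // s3.getObject(...).createReadStream()
    res.set("Content-Type", "image/jpeg"); // or look the type up alongside the key
    readStream.pipe(res);
    readStream.on("error", (err) => next(err));
  } catch (err) {
    next(ApiError.internal(`Unexpected error > mediaitem/:key GET -> Error: ${err.message}`));
  }
});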

Use a Cloud Function to download a JSON file from a URL, then upload it to a Cloud Storage bucket; status 200 but the uploaded JSON file is only 20 bytes and empty

I'm trying to use a Cloud Function to download a JSON file from here: http://jsonplaceholder.typicode.com/posts? and then upload it to a Cloud Storage bucket.
The function execution log seems fine and the status returns 200. However, the JSON file uploaded to the bucket is only 20 bytes and empty (while the original file is ~27 KB).
Please help me if I missed something; here are the code and logs:
index.js
const {Storage} = require('@google-cloud/storage');
exports.writeToBucket = (req, res) => {
const http = require('http');
const fs = require('fs');
const file = fs.createWriteStream("/tmp/post.json");
const request = http.get("http://jsonplaceholder.typicode.com/posts?", function(response) {
response.pipe(file);
});
console.log('file downloaded');
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
const bucketName = 'tft-test-48c87.appspot.com';
const filename = '/tmp/post.json';
// Uploads a local file to the bucket
storage.bucket(bucketName).upload(filename, {
gzip: true,
metadata: {
cacheControl: 'no-cache',
},
});
res.status(200).send(`${filename} uploaded to ${bucketName}.`);
};
package.json
{
"name": "sample-http",
"version": "0.0.1",
"dependencies": {
"#google-cloud/storage": "^3.0.3"
}
}
Result: (screenshot not included)
Log: (screenshot not included)
As pointed out by @DazWilkin, there are issues with the asynchronous code. You must wait for the 'finish' event to trigger and then proceed. The upload() method returns a promise too. Try refactoring your function with async/await syntax as shown below:
exports.writeToBucket = async (req, res) => {
const http = require('http');
const fs = require('fs');
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client
const storage = new Storage();
const bucketName = 'tft-test-48c87.appspot.com';
const filename = '/tmp/post.json';
await downloadJson()
// Uploads a local file to the bucket
await storage.bucket(bucketName).upload(filename, {
gzip: true,
metadata: {
cacheControl: 'no-cache',
},
});
res.status(200).send(`${filename} uploaded to ${bucketName}.`);
}
const downloadJson = async () => {
const Axios = require('axios')
const fs = require("fs")
const writer = fs.createWriteStream("/tmp/post.json")
const response = await Axios({
url: "http://jsonplaceholder.typicode.com/posts",
method: 'GET',
responseType: 'stream'
})
response.data.pipe(writer)
return new Promise((resolve, reject) => {
writer.on('finish', resolve)
writer.on('error', reject)
})
}
This example uses Axios but you can do the same with http.
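If you prefer the built-in http module, a roughly equivalent downloadJson could look like this (a sketch under the same assumptions as above):
const downloadJson = () => {
  const http = require("http");
  const fs = require("fs");
  const writer = fs.createWriteStream("/tmp/post.json");
  return new Promise((resolve, reject) => {
    http.get("http://jsonplaceholder.typicode.com/posts", (response) => {
      response.pipe(writer);
      writer.on("finish", resolve);
      writer.on("error", reject);
    }).on("error", reject);
  });
};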
Do note that you can directly upload the fetched JSON as a file like this:
exports.writeToBucket = async (req, res) => {
const Axios = require("axios");
const { Storage } = require("#google-cloud/storage");
const storage = new Storage();
const bucketName = "tft-test-48c87.appspot.com";
const filename = "/tmp/post.json";
const { data } = await Axios.get("http://jsonplaceholder.typicode.com/posts");
const file = storage.bucket(bucketName).file("file.json");
const contents = JSON.stringify(data);
await file.save(contents);
res.status(200).send(`${filename} uploaded to ${bucketName}.`);
};
You can read more about the save() method in the documentation.
I don't write much NodeJS but I think your issue is with async code.
You create the stream and then issue the http.get but you don't block on the callback (piping the file) completing before you start the GCS upload.
You may want to attach an .on("finish", () => {...}) to the pipe and in that callback, upload the file to GCS.
NOTE: IIRC, GCS has a method that will let you write a stream directly from memory rather than going through a file.
NOTE: if you pull the storage object up into the global namespace, it will only be created when the instance is created, not on every function invocation.
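Putting those two notes together, a hedged sketch: keep the Storage client in the global scope and pipe the HTTP response straight into a bucket object (here via file.createWriteStream()), skipping /tmp entirely:
const http = require("http");
const { Storage } = require("@google-cloud/storage");

// Created once per instance, not on every invocation
const storage = new Storage();
const bucket = storage.bucket("tft-test-48c87.appspot.com");

exports.writeToBucket = (req, res) => {
  const gcsFile = bucket.file("post.json");
  http.get("http://jsonplaceholder.typicode.com/posts", (response) => {
    response
      .pipe(gcsFile.createWriteStream({ metadata: { contentType: "application/json" } }))
      .on("finish", () => res.status(200).send("post.json uploaded."))
      .on("error", (err) => res.status(500).send(err.message));
  });
};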
You don't need a write stream to get the URL data: fetch the URL, await the response to resolve, and call the appropriate response.json() method.
Personally, I prefer to use Fetch and Axios over http as they are cleaner to work with. But with Node's http you can do the following:
https.get(url,(res) => {
let body = "";
res.on("data", (chunk) => {
body += chunk;
});
res.on("end", () => {
try {
let json = JSON.parse(body);
// do something with JSON
} catch (error) {
console.error(error.message);
};
});
}).on("error", (error) => {
console.error(error.message);
});
Once you have that, you can pass it directly to a storage method as a data blob or byte array.
byte[] byteArray = resultJson.toString().getBytes("UTF-8");
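That last line is Java; in Node.js the equivalent would be something like this, reusing the save() method shown earlier (inside an async function, with json being the parsed body and storage/bucketName as defined above):
const byteArray = Buffer.from(JSON.stringify(json), "utf-8");
await storage.bucket(bucketName).file("file.json").save(byteArray);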

Upload and unzip .zip file from E-Mail in Google Cloud Functions deleting .zip after completion nodeJS

I am receiving an e-mail containing a single .zip file. Right now I'm trying to upload it to GCS and handle (bulk-decompress) the file into its own folder. Running a bulk Dataflow job for such a small action seems like overkill to me.
I was thinking about using the "unzip-stream" package, but so far I haven't come up with an efficient solution to my problem.
Is it even possible to process a file like this in a Cloud Function? Or is there no way without a dedicated server handling decompression and then uploading the content into GCS?
Here's my code:
const os = require('os');
const fs = require('fs');
const path = require('path');
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
// Node.js doesn't have a built-in multipart/form-data parsing library.
// Instead, we can use the 'busboy' library from NPM to parse these requests.
const Busboy = require('busboy');
exports.uploadZIPFile = (req, res) => {
if (req.method !== 'POST') {
// Return a "method not allowed" error
return res.status(405).end();
}
const busboy = new Busboy({ headers: req.headers });
const tmpdir = os.tmpdir();
for (a in req.headers) {
console.log(`Header: ${a}`);
}
// This object will accumulate all the fields, keyed by their name
const fields = {};
// This code will process each non-file field in the form.
busboy.on('field', (fieldname, val) => {
// TODO(developer): Process submitted field values here
console.log(`Processed field ${fieldname}: ${val}.`);
fields[fieldname] = val;
});
// This object will accumulate all the uploaded files, keyed by their name.
const uploads = {};
const fileWrites = [];
// This code will process each file uploaded.
busboy.on('file', (fieldname, file, filename) => {
// Note: os.tmpdir() points to an in-memory file system on GCF
// Thus, any files in it must fit in the instance's memory.
console.log(`Processed file ${filename} - ${fieldname} - ${file}`);
const filepath = path.join(tmpdir, filename);
uploads[fieldname] = filepath;
const writeStream = fs.createWriteStream(filepath);
file.pipe(writeStream);
// File was processed by Busboy; wait for it to be written to disk.
const promise = new Promise((resolve, reject) => {
file.on('end', () => {
writeStream.end();
});
writeStream.on('finish', resolve);
writeStream.on('error', reject);
});
fileWrites.push(promise);
});
// Triggered once all uploaded files are processed by Busboy.
// We still need to wait for the disk writes (saves) to complete.
busboy.on('finish', async () => {
await Promise.all(fileWrites);
// TODO(developer): Process saved files here
for (const file in uploads) {
async function upload2bucket() {
// Uploads a local file to the bucket
const bucketName = 'myBucket';
const todayDate = new Date().toISOString().slice(0, 10);
await storage.bucket(bucketName).upload(uploads[file], {
// Support for HTTP requests made with `Accept-Encoding: gzip`
gzip: true,
// By setting the option `destination`, you can change the name of the
// object you are uploading to a bucket.
destination:
'zip-inbox/' + todayDate + '_' + uploads[file].substring(5),
metadata: {
// Enable long-lived HTTP caching headers
// Use only if the contents of the file will never change
// (If the contents will change, use cacheControl: 'no-cache')
cacheControl: 'no-cache',
},
});
console.log(`${filename} uploaded to ${bucketName}.`);
}
if (uploads[file].endsWith('.zip')) {
await upload2bucket();
}
console.log(`${file}: ${uploads[file]}`);
//fs.unlinkSync(file);
}
res.status(200).send('Success');
});
busboy.end(req.rawBody);
};
Cloud Functions are great for small tasks that take little time. When you have a requirement like this one - you want to spin up an instance for a unit of work only when needed, but cannot predict how long it will take to execute - I suggest using Cloud Run; have a look at this use case.
You may replace your "do-everything" Cloud Function with several functions and create a pipeline with PubSub messages or with Google Workflows (released in January 2021).
I also recommend using the firebase-functions package to simplify Cloud Functions definitions.
Pipeline could be like this:
Create and schedule a function which gets attachments from the mailbox and uploads them to Google Storage. After each attachment has been uploaded it sends a PubSub message with the filepath payload to the next step.
Create another function which executes on message from PubSub and unpacks uploaded earlier file.
Note: a function can execute for not more than 9 minutes. Split the function if it takes longer than the limit.
Here is a pseudo code for the proposed pipeline:
import * as functions from 'firebase-functions';
import {PubSub} from '@google-cloud/pubsub';
const REGION = 'your-google-cloud-region'; // e.g. europe-west1
const PUBSUB_TOPIC_UNPACK_ZIP = 'your-topic-name-for-step-2';
/**
* Step 1.
*/
exports.getFromEmailAndUploadToStorage = functions
.region(REGION)
.runWith({
memory: '256MB',
timeoutSeconds: 540, // Max execution duration is 9 minutes
maxInstances: 1
})
.pubsub
.schedule('every 15 minutes')
.timeZone('Asia/Vladivostok')
.onRun(async _ => {
const filepath = await yourLogicToFetchEmailAndUploadToStorage();
if (filepath) {
const messagePayload = {filepath};
console.log('Publishing message to the topic:', PUBSUB_TOPIC_UNPACK_ZIP);
console.log('Payload:', JSON.stringify(messagePayload));
await new PubSub().topic(PUBSUB_TOPIC_UNPACK_ZIP).publishMessage({
json: messagePayload
});
}
});
/**
* Step 2.
*/
exports.unzipFilesInStorage = functions
.region(REGION)
.runWith({
memory: '256MB',
timeoutSeconds: 540, // Max execution duration is 9 minutes
maxInstances: 1
})
.pubsub
.topic(PUBSUB_TOPIC_UNPACK_ZIP )
.onPublish(async (message, context) => {
const {filepath} = message?.json;
if (!filepath) {
throw new Error('filepath is not set.');
}
// your unzip logic here.
// after unzipping you can pipe the results further via PubSub messages.
});
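As for the unzip logic itself, here is a rough sketch using the unzip-stream package mentioned in the question: download the archive to /tmp, extract it, upload each entry, then delete the .zip as the title asks. It assumes a flat archive that fits in the function's memory-backed /tmp, and the "unzipped/" prefix is just an example.
const unzip = require("unzip-stream");
const fs = require("fs");
const os = require("os");
const path = require("path");

// Sketch of the unzip step for a .zip object already in the bucket.
async function unzipFromBucket(bucket, filepath) {
  const localZip = path.join(os.tmpdir(), path.basename(filepath));
  const outDir = path.join(os.tmpdir(), "unzipped");
  fs.mkdirSync(outDir, { recursive: true });
  await bucket.file(filepath).download({ destination: localZip });
  await new Promise((resolve, reject) => {
    fs.createReadStream(localZip)
      .pipe(unzip.Extract({ path: outDir })) // 'close' is assumed to fire once extraction is done
      .on("close", resolve)
      .on("error", reject);
  });
  for (const name of fs.readdirSync(outDir)) {
    // eslint-disable-next-line no-await-in-loop
    await bucket.upload(path.join(outDir, name), { destination: `unzipped/${name}` });
  }
  fs.rmSync(outDir, { recursive: true, force: true });
  fs.unlinkSync(localZip);
  await bucket.file(filepath).delete(); // delete the original .zip after completion
}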

How to use bucket.upload() instead of file.createWriteStream() in Google Cloud Storage?

I'm trying to get the permanent (unsigned) download URL after uploading a file to Google Cloud Storage. I can get the signed download URL using file.createWriteStream() but file.createWriteStream() doesn't return the UploadResponse that includes the unsigned download URL. bucket.upload() includes the UploadResponse, and Get Download URL from file uploaded with Cloud Functions for Firebase has several answers explaining how to get the unsigned download URL from the UploadResponse. How do I change file.createWriteStream() in my code to bucket.upload()? Here's my code:
const {Storage} = require('@google-cloud/storage');
const storage = new Storage({ projectId: 'my-app' });
const bucket = storage.bucket('my-app.appspot.com');
var file = bucket.file('Audio/' + longLanguage + '/' + pronunciation + '/' + wordFileType);
const config = {
action: 'read',
expires: '03-17-2025',
content_type: 'audio/mp3'
};
function oedPromise() {
return new Promise(function(resolve, reject) {
http.get(oedAudioURL, function(response) {
response.pipe(file.createWriteStream(options))
.on('error', function(error) {
console.error(error);
reject(error);
})
.on('finish', function() {
file.getSignedUrl(config, function(err, url) {
if (err) {
console.error(err);
return;
} else {
resolve(url);
}
});
});
});
});
}
I tried this, it didn't work:
function oedPromise() {
return new Promise(function(resolve, reject) {
http.get(oedAudioURL, function(response) {
bucket.upload(response, options)
.then(function(uploadResponse) {
console.log('Then do something with UploadResponse.');
})
.catch(error => console.error(error));
});
});
}
The error message was Path must be a string. In other words, bucket.upload() expects a local file path string, but response here is a stream.
I used the Google Cloud Text-to-Speech API to simulate what you are doing: getting the text to create the audio file from a text file. Once the file was created, I used the upload method to add it to my bucket and the makePublic method to get its public URL. I also used the async/await feature offered by Node.js instead of function chaining (using then) to avoid the 'No such object: ...' error produced when the makePublic method is executed before the file finishes uploading to the bucket.
// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');
// Creates a client using Application Default Credentials
const storage = new Storage();
// Imports the Google Cloud client library
const textToSpeech = require('@google-cloud/text-to-speech');
// Get the bucket
const myBucket = storage.bucket('my_bucket');
// Import other required libraries
const fs = require('fs');
const util = require('util');
// Create a client
const client = new textToSpeech.TextToSpeechClient();
// Create the variable to save the text to create the audio file
var text = "";
// Function that reads my_text.txt file (which contains the text that will be
// used to create my_audio.mp3) and saves its content in a variable.
function readFile() {
// This line opens the file as a readable stream
var readStream = fs.createReadStream('/home/usr/my_text.txt');
// Read and display the file data on console
readStream.on('data', function (data) {
text = data.toString();
});
// Execute the createAndUploadFile() function once the whole file has been read
readStream.on('end', function (data) {
createAndUploadFile();
});
}
// Function that uploads the file to the bucket and generates it public URL.
async function createAndUploadFile() {
// Construct the request
const request = {
input: {text: text},
// Select the language and SSML voice gender (optional)
voice: {languageCode: 'en-US', ssmlGender: 'NEUTRAL'},
// select the type of audio encoding
audioConfig: {audioEncoding: 'MP3'},
};
// Performs the text-to-speech request
const [response] = await client.synthesizeSpeech(request);
// Write the binary audio content to a local file
const writeFile = util.promisify(fs.writeFile);
await writeFile('my_audio.mp3', response.audioContent, 'binary');
console.log('Audio content written to file: my_audio.mp3');
// Wait for the myBucket.upload() function to complete before moving on to the
// next line to execute it
let res = await myBucket.upload('/home/usr/my_audio.mp3');
// If there is an error, it is printed
if (res.err) {
console.log('error');
}
// If not, the makePublic() function is executed
else {
// Get the file in the bucket
let file = myBucket.file('my_audio.mp3');
file.makePublic();
}
}
readFile();
bucket.upload() is a convenience wrapper around file.createWriteStream() that takes a local filesystem path and uploads the file into the bucket as an object:
bucket.upload("path/to/local/file.ext", options)
.then(() => {
// upload has completed
});
To generate a signed URL, you'll need to get a file object from the bucket:
const theFile = bucket.file('file_name');
The file name will either be that of your local file or, if you specified an alternate remote name with options.destination, that name for the file on GCS.
Then, use File.getSignedUrl() to get a signed URL:
bucket.upload("path/to/local/file.ext", options)
.then(() => {
const theFile = bucket.file('file.ext');
return theFile.getSignedUrl(signedUrlOptions); // getSignedUrl returns a Promise
})
.then((signedUrl) => {
// do something with the signedURL
});
See:
Bucket.upload() documentation
File.getSignedUrl() documentation
You can make a specific file in a bucket publicly readable with the method makePublic.
From the docs:
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
// 'my-bucket' is your bucket's name
const myBucket = storage.bucket('my-bucket');
// 'my-file' is the path to your file inside your bucket
const file = myBucket.file('my-file');
file.makePublic(function(err, apiResponse) {});
//-
// If the callback is omitted, we'll return a Promise.
//-
file.makePublic().then(function(data) {
const apiResponse = data[0];
});
Now the URI http://storage.googleapis.com/[BUCKET_NAME]/[OBJECT_NAME] is a public link to the file, as explained here.
The point is that you only need this minimal code to make an object public, for instance with a Cloud Function. Then you already know what the public link looks like and can use it directly in your app.
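In code, once makePublic() has resolved you can build that link directly from the bucket and file names (a small sketch, inside an async function):
await file.makePublic();
const publicUrl = `http://storage.googleapis.com/${myBucket.name}/${file.name}`;
console.log(publicUrl); // e.g. http://storage.googleapis.com/my-bucket/my-file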

How to process a scraped image and upload to firebase storage using firebase functions?

I'm trying to grab some HD images from urls, resize them and upload to storage.
So far, i've gotten the image, and resized using sharp. The output API of sharp uses .toFile('output.jpg') or .toBuffer(), and I'm not sure how to proceed from here. What would be the easiest way to output the image, and upload it to firebase storage?
My code so far:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const request = require('request').defaults({ encoding: null });
const sharp = require('sharp');
exports.copyThumbnailImage = functions.firestore.document('users/{userId}/vocab/{vocabId}').onCreate((snapshot, context) => {
// mock: copyThumbnailImage({ chosenImages: [{ googleThumbnailUrl: "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQlC7Vnu9CuZlA-nTpW8TLPd8dAE456LCpeXoadUKHoxB7WEmM1rfahqsfr", mime: "image/jpeg", originalUrl: "https://storage.tenki.jp/storage/static-images/suppl/article/image/2/27/278/27810/1/large.jpg" }] }, { params: { userId: 'zYCw6DmcEiOS8Yk4QltYPikykwP2', vocabId: 1420970 } })
const data = snapshot.data()
const vocabId = context.params.vocabId
const images = data.chosenImages
const checkExistencePromises = []
// Promises check if image ref already exists in firestore
images.forEach(imageObj => {
checkExistencePromises.push(db.collection('userImages').where('originalUrl', '==', imageObj.originalUrl).where('vocabId', '==', vocabId).get())
})
return Promise.all(checkExistencePromises)
.then(snapshots => {
const getImagePromises = []
snapshots.forEach((snapshot, i) => {
if (snapshot.empty) {
// if image doesn't exist already, upload thumbnail to DB, add doc to userImages and add link to review
const originalUrl = images[i].originalUrl
getImagePromises.push(getImage(originalUrl))
} else {
// add link to review
}
})
return Promise.all(getImagePromises)
})
.then(responses => {
responses.forEach(response => {
sharp(response).resize(200, 200).toBuffer()
// ????????
})
})
.then(() => {
})
.catch(error => {
console.log(error)
})
})
function getImage (url) {
return new Promise((resolve, reject) => {
request.get(url, (err, res, body) => {
if (err) reject(err)
resolve(body)
})
})
}
You can save it to the local file system (the local /tmp disk) and upload it to Cloud Storage from there.
Have a look at this official Cloud Functions sample: https://github.com/firebase/functions-samples/blob/master/convert-images/functions/index.js. (I copy the code below for future reference.)
In particular, look at how they save a temporary file with
return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
and how they upload it with:
return bucket.upload(tempLocalJPEGFile, {destination: JPEGFilePath});
In your case, instead of calling spawn() you would call
.toFile(-theTemporaryFileName-)
Finally, have a look at Write temporary files from Google Cloud Function and Attach firebase cloud function or cache its data from cloud function call about the /tmp disk.
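Applied to the question's sharp pipeline, a minimal sketch (assuming admin.initializeApp() has already run, imageBuffer is the buffer returned by getImage(), and the thumbnails/ destination is just an example) could be:
const os = require("os");
const path = require("path");
const fs = require("fs");

async function resizeAndUpload(imageBuffer, vocabId) {
  const bucket = admin.storage().bucket();                  // default Firebase Storage bucket
  const tempLocalFile = path.join(os.tmpdir(), `${vocabId}_thumb.jpg`);
  // Resize with sharp and write the result to the local /tmp disk
  await sharp(imageBuffer).resize(200, 200).toFile(tempLocalFile);
  // Upload the temporary file to Storage, then free the /tmp space
  await bucket.upload(tempLocalFile, { destination: `thumbnails/${vocabId}_thumb.jpg` });
  fs.unlinkSync(tempLocalFile);
}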
Code from the Cloud Function Sample as of 08/01/2018 (link above)
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
const gcs = require('@google-cloud/storage')();
const spawn = require('child-process-promise').spawn;
const path = require('path');
const os = require('os');
const fs = require('fs');
// File extension for the created JPEG files.
const JPEG_EXTENSION = '.jpg';
/**
* When an image is uploaded in the Storage bucket it is converted to JPEG automatically using
* ImageMagick.
*/
exports.imageToJPG = functions.storage.object().onFinalize((object) => {
const filePath = object.name;
const baseFileName = path.basename(filePath, path.extname(filePath));
const fileDir = path.dirname(filePath);
const JPEGFilePath = path.normalize(path.format({dir: fileDir, name: baseFileName, ext: JPEG_EXTENSION}));
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
const tempLocalJPEGFile = path.join(os.tmpdir(), JPEGFilePath);
// Exit if this is triggered on a file that is not an image.
if (!object.contentType.startsWith('image/')) {
console.log('This is not an image.');
return null;
}
// Exit if the image is already a JPEG.
if (object.contentType.startsWith('image/jpeg')) {
console.log('Already a JPEG.');
return null;
}
const bucket = gcs.bucket(object.bucket);
// Create the temp directory where the storage file will be downloaded.
return mkdirp(tempLocalDir).then(() => {
// Download file from bucket.
return bucket.file(filePath).download({destination: tempLocalFile});
}).then(() => {
console.log('The file has been downloaded to', tempLocalFile);
// Convert the image to JPEG using ImageMagick.
return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
}).then(() => {
console.log('JPEG image created at', tempLocalJPEGFile);
// Uploading the JPEG image.
return bucket.upload(tempLocalJPEGFile, {destination: JPEGFilePath});
}).then(() => {
console.log('JPEG image uploaded to Storage at', JPEGFilePath);
// Once the image has been converted delete the local files to free up disk space.
fs.unlinkSync(tempLocalJPEGFile);
fs.unlinkSync(tempLocalFile);
return;
});
});
