Download File on Request in Firebase - node.js

I am looking for a solution to directly download a file from Firebase Storage when hitting an API endpoint. I tried initializing a Google Cloud Storage client and downloading the file from the bucket:
const app = require('express')();
const { Storage } = require("@google-cloud/storage");
const storage = new Storage({ keyFilename: keyPath });

app.get("/download", (req, res) => {
  storage.bucket(bucketName).file("file.txt").download({ destination: './file.txt' });
});

app.listen(8080);
But this does not work!
I simply get:
UnhandledPromiseRejectionWarning: Error: Not Found
Could someone help me, please?

Where did you initialize the app?
Original answer:
// Dependencies
const express = require('express');
const PORT = process.env.PORT || 3002;

// Initialize the App
const app = express();

// Start the app
app.listen(PORT, () => {
  console.info(`Server is listening on port ${PORT}`);
});
Update:
Making HTTP requests to download files is an asynchronous operation. You need to wait for the file to be downloaded from Google Cloud Storage before sending it to the client:
const app = require('express')();
const { Storage } = require("@google-cloud/storage");
const storage = new Storage({ keyFilename: keyPath });

// I am using async/await here
app.get("/download", async (req, res) => {
  // You have to wait till the file is downloaded
  await storage.bucket(bucketName).file("file.txt").download({ destination: './file.txt' });
  // Send the file to the client
  res.download('./file.txt');
});

app.listen(8080);

If the intention is to stream the file to the requesting client, you can pipe the data from Cloud Storage through to the response. It will look similar to the following:
const {Storage} = require('@google-cloud/storage');
const express = require('express');

const BUCKET_NAME = 'my-bucket';

const app = express();
const storage = new Storage({keyFilename: './path/to/service/key.json'});
app.get("/download", (req, res) => {
storage.bucket(bucketName).file("path/in/bucket/to/file.txt").createReadStream()
.on('error', (err) => {
res.status(500).send('Internal Server Error');
console.log(err);
})
.on('response', (storageResponse) => {
// make sure to check storageResponse.status
res.setHeader('content-type', storageResponse.headers['Content-Type']);
res.setHeader('content-length', storageResponse.headers['Content-Length']);
res.status(storageResponse.status);
// other headers may be necessary
// if status != 200, make sure to end the response as appropriate. (as it won't reach the below 'end' event)
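      // For example, a minimal sketch (assuming the 'error' handler above has not already responded):
      // if (storageResponse.status !== 200) {
      //   res.end();
      //   return;
      // }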
    })
    .on('end', () => {
      console.log('Piped file successfully.');
      res.end();
    }).pipe(res);
});
app.listen(8080);

Related

Transform NextJs output into JSON using ExpressJs

For migration purposes I have to transform the content generated by nextjs into JSON format like {content: "generated markup"} in expressjs.
const express = require('express');
const next = require('next');

const port = 8080;
const dev = process.env.NODE_ENV !== 'production';
const nextApp = next({ dev });
const handle = nextApp.getRequestHandler();

nextApp.prepare().then(() => {
  const server = express();

  server.all('*', async (req, res) => {
    return handle(req, res);
  });

  server.use((req, res, next) => {
    /* How to set res.json({content:<RESULT_FROM_NEXT_JS>})??? */
  });

  server.listen(port, () => {
    console.log(`[server]: Server is running at http://localhost:${port}`);
  });
});
What I understand so far is that Next.js creates a stream of chunked data, but I do not know how to buffer this stream and then create JSON from it. Any clue on how to build a middleware for that? Or any other idea on how to generate JSON in this format?
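One possible approach (a sketch, not from this thread) is to wrap res.write/res.end before handing the request to the Next.js handler, buffer whatever it renders, and then send that buffer back as the JSON envelope. The catch-all route and variable names below are illustrative assumptions:
const express = require('express');
const next = require('next');

const dev = process.env.NODE_ENV !== 'production';
const nextApp = next({ dev });
const handle = nextApp.getRequestHandler();

nextApp.prepare().then(() => {
  const server = express();

  server.all('*', (req, res) => {
    const chunks = [];
    const originalEnd = res.end.bind(res);

    // Buffer everything Next.js writes instead of streaming it to the client.
    res.write = (chunk) => {
      chunks.push(Buffer.from(chunk));
      return true;
    };

    // When Next.js finishes, wrap the captured markup in a JSON envelope.
    res.end = (chunk) => {
      if (chunk) chunks.push(Buffer.from(chunk));
      const content = Buffer.concat(chunks).toString('utf8');

      res.removeHeader('content-length'); // the JSON body has a different length
      res.setHeader('content-type', 'application/json');
      return originalEnd(JSON.stringify({ content }));
    };

    return handle(req, res);
  });

  server.listen(8080, () => {
    console.log('[server]: Server is running at http://localhost:8080');
  });
});
Because nothing is actually written until the patched res.end runs, the headers have not been flushed yet, so the content-type can still be switched to application/json at that point.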

Downloading JSON file in expressjs and reading it

I have a task where I am given a URL such as https://xyz.json. Visiting this URL prompts the download of the JSON file to the local machine. I am now required to read and use this JSON data for further processing. Since I am new to Node.js and Express, I find myself confused about how to achieve this in Express.
This is what I've tried:
const https = require("https");
const fs = require("fs");

const file = fs.createWriteStream("outputFile.json");
const request = https.get(
  "https://xyz.json",
  function (response) {
    response.pipe(file);

    // after download completed close filestream
    file.on("finish", () => {
      file.close();
      console.log("Download Completed");
    });
  }
);
Here, no data ends up in outputFile.json.
Qn2) Can I download the file periodically using setTimeout()? Would that be efficient, or is there a better way of caching the data to make the application faster?
Thanks in advance!
Here's a sample app, hosted as an Express.js server, that downloads a JSON file whenever you hit an API route.
const express = require('express');
const cors = require('cors');
const morgan = require('morgan');
const bodyParser = require('body-parser');
const axios = require('axios');
const fs = require('fs');

const app = express();
app.use(cors());
app.use(morgan(':method :url :status :user-agent - :response-time ms'));
app.use(bodyParser.json());

app.get('/', async (req, res) => {
  try {
    const { status, data } = await axios.get('http://52.87.135.24/json-files/events.json'); // Can be replaced by your json url
    if (status === 200) {
      fs.writeFileSync('data.json', JSON.stringify(data));
      res.status(200).json({
        success: 'Downloaded file.',
        data: data // Comment this out if you don't want to send the data back
      });
    } else {
      res.status(404).json({ 'Failed': 'File not found.' });
    }
  } catch (err) {
    console.log(err);
    res.status(500).json({ 'Error': 'Internal Server Error' });
  }
});

app.listen(process.env.PORT || 3000, function () {
  console.log('Express app running on port ' + (process.env.PORT || 3000));
});
And as I mentioned, this download gets triggered every time you make a request (to http://localhost:3000 in this case), so you can create a client script that acts like a cron job, using setTimeout or, better, setInterval to download your file periodically.
const axios = require('axios');

setInterval(async () => {
  await axios.get('http://localhost:3000/');
}, 5000);
The snippet above is exactly such a script! :)

Google Cloud Storage (setting up) NodeJS

So I am looking at the sample code from Google, and I can't work out how to activate the config file.
https://cloud.google.com/appengine/docs/flexible/nodejs/using-cloud-storage
The sample code:
const {format} = require('util');

const express = require('express');
const Multer = require('multer');
const bodyParser = require('body-parser');

// By default, the client will authenticate using the service account file
// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use
// the project specified by the GOOGLE_CLOUD_PROJECT environment variable. See
// https://github.com/GoogleCloudPlatform/google-cloud-node/blob/master/docs/authentication.md
// These environment variables are set automatically on Google App Engine
const {Storage} = require('@google-cloud/storage');

// Instantiate a storage client
const storage = new Storage();

const app = express();
app.set('view engine', 'pug');
app.use(bodyParser.json());

// Multer is required to process file uploads and make them available via
// req.files.
const multer = Multer({
  storage: Multer.memoryStorage(),
  limits: {
    fileSize: 5 * 1024 * 1024, // no larger than 5mb, you can change as needed.
  },
});

// A bucket is a container for objects (files).
const bucket = storage.bucket(process.env.GCLOUD_STORAGE_BUCKET);

// Display a form for uploading files.
app.get('/', (req, res) => {
  res.render('form.pug');
});

// Process the file upload and upload to Google Cloud Storage.
app.post('/upload', multer.single('file'), (req, res, next) => {
  if (!req.file) {
    res.status(400).send('No file uploaded.');
    return;
  }

  // Create a new blob in the bucket and upload the file data.
  const blob = bucket.file(req.file.originalname);
  const blobStream = blob.createWriteStream();

  blobStream.on('error', (err) => {
    next(err);
  });

  blobStream.on('finish', () => {
    // The public URL can be used to directly access the file via HTTP.
    const publicUrl = format(
      `https://storage.googleapis.com/${bucket.name}/${blob.name}`
    );
    res.status(200).send(publicUrl);
  });

  blobStream.end(req.file.buffer);
});

const PORT = process.env.PORT || 8080;
app.listen(PORT, () => {
  console.log(`App listening on port ${PORT}`);
  console.log('Press Ctrl+C to quit.');
});
However, when you read https://github.com/GoogleCloudPlatform/google-cloud-node/blob/master/docs/authentication.md, it says to set up a config file like the following:
{
  "projectId": "grape-spaceship-123",
  "keyFilename": "./PROJECT-XXXXXX.json"
}
The keyFilename points to the Google-generated JSON key file.
But now how do I tell the sample code above to use that?
NOTE: Adding the config file
const config = require('./config')
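To make the sample use it, one option (a minimal sketch, assuming ./config.json holds the projectId and keyFilename fields shown above) is to pass that object to the Storage constructor:
const {Storage} = require('@google-cloud/storage');
const config = require('./config'); // resolves ./config.json

// Instantiate the client with the key file instead of relying on
// the GOOGLE_APPLICATION_CREDENTIALS environment variable.
const storage = new Storage({
  projectId: config.projectId,
  keyFilename: config.keyFilename,
});
Passing new Storage(config) directly also works, since the options object uses the same projectId and keyFilename keys.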
I created the storage client like this and it works for me:
import { Storage } from '@google-cloud/storage'; // maybe you need to use require()
import * as path from 'path';

const storage = new Storage({
  keyFilename: path.join(__dirname, '../********************.json'),
  projectId: '***********Id'
});

const fileBucket = storage.bucket('***********-storage');
Good video about this: https://www.youtube.com/watch?v=pGSzMfKBV9Q

NodeJS & GCP PubSub - TypeError: PubSub is not a constructor at Object.<anonymous>

I'm following a tutorial at https://www.woolha.com/tutorials/node-js-google-cloud-pub-sub-basic-examples and having some difficulty..
I have the following code in server.js:
const express = require('express');
const app = express();
const path = require('path');
const bodyParser = require('body-parser');
const dotenv = require('dotenv');

dotenv.config(); // Reads the .env file from the local folder.

// PubSub constant initialisation
const PubSub = require(`@google-cloud/pubsub`);
const pubsub = new PubSub();

const data = new Date().toString();
const dataBuffer = Buffer.from(data);
const topicName = 'sensehat-led-config';

app.use(bodyParser.urlencoded({ extended: true }));

// Tell the app to use the public folder.
app.use(express.static('public'));

app.get('/', (req, res) => {
  res.send('Hello from App Engine!');
})

app.get('/submit', (req, res) => {
  res.sendFile(path.join(__dirname, '/views/form.html'));
})

// Need to figure out how to get the css file to work in this. Can't be that hard.
app.get('/sensehat', (req, res) => {
  res.sendFile(path.join(__dirname, '/views/sensehat.html'));
})

app.get('/sensehat-publish-message', (req, res) => {
  pubsub
    .topic(topicName)
    .publisher()
    .publish(dataBuffer)
    .then(messageId => {
      console.log(`Message ${messageId} published`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
})

app.post('/submit', (req, res) => {
  console.log({
    name: req.body.name,
    message: req.body.message
  });
  res.send('Thanks for your message!');
})

// Listen to the App Engine-specified port, or 8080 otherwise
const PORT = process.env.PORT || 8080;
app.listen(PORT, () => {
  console.log(`Server listening on port ${PORT}...`);
})
But when I run it I get a '500 Server Error', and looking at the Stackdriver logs I see the following error:
TypeError: PubSub is not a constructor at Object.<anonymous>
I'm definitely a newbie at NodeJS and feeling my way around. After reading around I think the issue is coming from the
const PubSub = require(`@google-cloud/pubsub`);
const pubsub = new PubSub();
lines, but no idea how to rectify this.
You can try with the latest versions of all the libraries.
Dependencies in package.json
"dependencies": {
"#google-cloud/pubsub": "1.5.0",
"google-gax": "1.14.1",
"googleapis": "47.0.0"
}
Example code -
const { PubSub } = require('@google-cloud/pubsub');

const pubsub = new PubSub({
  projectId: process.env.PROJECT_ID
});

module.exports = {
  publishToTopic: function(topicName, data) {
    return pubsub.topic(topicName).publish(Buffer.from(JSON.stringify(data)));
  },
};
Calling file code
const PubSubPublish = require('path to your above file');

// Note: await is only valid inside an async function
let publishResult = await PubSubPublish.publishToTopic(process.env.TOPIC_NAME, data);
Hope it helps!
You require the default export of @google-cloud/pubsub, but what you are looking for is not in the default export.
Change the way you import PubSub to:
const {PubSub} = require(`@google-cloud/pubsub`);
Instead of:
const PubSub = require(`@google-cloud/pubsub`);

How to use Node.js and Postman to upload a file into Firebase?

I have a Node.js file that adds a picture (lion.jpg) to Cloud Storage:
const firebase = require('firebase-admin');
const express = require('express');
const app = express();

const serviceAccount = require("./key9525");

firebase.initializeApp({
  credential: firebase.credential.cert(serviceAccount),
  databaseURL: "https://myadress.firebaseio.com" // example address
});

const bucketName = 'myadress.appspot.com';
const filename = './lion.jpg'; // example file

async function uploadFile() {
  const {Storage} = require('@google-cloud/storage');
  const storage = new Storage();

  await storage.bucket(bucketName).upload(filename, {
    gzip: true,
    metadata: {
      cacheControl: 'public, max-age=31536000',
    },
  });

  console.log(`${filename} uploaded to ${bucketName}.`);
}

uploadFile();
and I have a file that allows me to select and upload a photo, for example from Postman:
const express = require('express');
const app = express();
const port = 3000;

app.get('/', (req, res) => {
  res.send('hello people');
});

app.listen(port, () => {
  console.log('listening to the port: ' + port);
});

var multer = require('multer');
var upload = multer({ dest: 'uploads/' });

app.post('/single', upload.single('profile'), (req, res) => {
  try {
    res.send(req.file);
  } catch (err) {
    res.send(400);
  }
});
How can I connect these two pieces of code so that, after running the Node.js file and selecting a file in Postman, the file gets uploaded to Firebase?
Thank you for any tips.
You have to pass multer as the middleware to your POST call, just like you did. Please refer to this link.
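Below is a minimal sketch of how the two snippets could be wired together, modeled on the App Engine upload sample earlier in this thread; the bucket name and the 'profile' field come from the asker's code, and everything else is an illustrative assumption:
const firebase = require('firebase-admin');
const express = require('express');
const Multer = require('multer');

const serviceAccount = require('./key9525');

firebase.initializeApp({
  credential: firebase.credential.cert(serviceAccount),
  storageBucket: 'myadress.appspot.com' // the asker's example bucket
});

const bucket = firebase.storage().bucket();

// Keep the uploaded file in memory so it can be streamed straight to the bucket.
const multer = Multer({ storage: Multer.memoryStorage() });

const app = express();

app.post('/single', multer.single('profile'), (req, res) => {
  if (!req.file) {
    return res.status(400).send('No file uploaded.');
  }

  // Create a blob in the bucket and write the uploaded bytes into it.
  const blob = bucket.file(req.file.originalname);
  const blobStream = blob.createWriteStream({
    metadata: { contentType: req.file.mimetype },
  });

  blobStream.on('error', (err) => {
    console.error(err);
    res.status(500).send('Upload failed.');
  });

  blobStream.on('finish', () => {
    res.status(200).send(`${req.file.originalname} uploaded to the bucket.`);
  });

  blobStream.end(req.file.buffer);
});

app.listen(3000, () => console.log('listening to the port: 3000'));
With this in place, a POST from Postman with a form-data field named "profile" should land the file directly in the Firebase Storage bucket.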
