Can I write to firestore without authentication with nodejs admin - node.js

I've written a Cloud Function that resizes images after they are uploaded to Cloud Storage and then returns a signed URL for the uploaded image. However, when I try to write that URL to Firestore afterwards, I get a PERMISSION_DENIED: Missing or insufficient permissions error. Is there a way around this without changing the Firestore rules to allow anyone read/write access?
Below is the code:
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import * as fs from "fs-extra";
import { tmpdir } from "os";
import { join, dirname } from "path";
import * as sharp from "sharp";

admin.initializeApp({
  credential: admin.credential.applicationDefault()
});

const gcs = admin.storage();
const db = admin.firestore();

export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name as string;
    const fileName = filePath.split("/").pop();
    const bucketDir = dirname(filePath);
    const workingDir = join(tmpdir(), "thumbs");
    const tmpFilePath = join(workingDir, "source.png");

    if (fileName?.includes("#") || !object.contentType?.includes("image")) {
      console.log("exists, returning false...");
      return false;
    }

    // Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // Download source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // Resize the images and define an array of upload promises
    const sizes = [64, 128, 320, 640];
    const uploadPromises = sizes.map(async size => {
      const imageName = fileName?.split(".")[0];
      const imageExt = fileName?.split(".")[1];
      const thumbName = `${imageName}#${size}.${imageExt}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source images
      return sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath)
        .then(outputInfo => {
          // Upload to GCS
          return bucket
            .upload(thumbPath, {
              destination: join(bucketDir, thumbName)
            })
            .then(async res => {
              return res[0]
                .getSignedUrl({
                  action: "read",
                  expires: "01-01-2040"
                })
                .then(signedUrlRes => {
                  console.log(`url: ${signedUrlRes[0]}`);
                  // const docRef = db
                  //   .collection("/cities")
                  //   .doc(imageName?.split("_")[0] as string);
                  // return db.runTransaction(t => {
                  //   t.set(docRef, {
                  //     imageUrl: signedUrlRes[0]
                  //   });
                  //   return Promise.resolve();
                  // });
                  return db
                    .collection("/cities")
                    .doc(imageName?.split("_")[0] as string)
                    .set({
                      imageUrl: signedUrlRes[0]
                    })
                    .then(res => console.log("written"))
                    .catch(e => {
                      console.log("Firebase write error");
                      console.log(e);
                      throw Error(e);
                    });
                })
                .catch(e => {
                  console.log(e);
                  throw Error(e);
                });
            });
        });
    });

    // Run upload operations
    await Promise.all(uploadPromises).catch(e => {
      console.log(e);
      throw Error(e.message);
    });

    return fs.remove(workingDir).catch(e => {
      console.log(e);
      throw Error(e.message);
    });
  });

I changed admin.initializeApp() from
admin.initializeApp({
  credential: admin.credential.applicationDefault()
});
to
admin.initializeApp(functions.config().firebase);
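For reference, the Admin SDK is not subject to Firestore security rules when it runs with the project's service-account credentials, so no rule change is needed; a PERMISSION_DENIED here usually means those credentials were not picked up as expected. A minimal sketch of the same write with a bare initializeApp() in current firebase-admin versions (the collection and field names mirror the code above; the helper name is made up):

const admin = require("firebase-admin");

// With no arguments, initializeApp() uses the Cloud Functions runtime
// service account, and Admin SDK writes bypass Firestore security rules.
admin.initializeApp();

const db = admin.firestore();

// Hypothetical helper: store a signed URL on a city document.
async function saveImageUrl(cityId, signedUrl) {
  await db.collection("cities").doc(cityId).set(
    { imageUrl: signedUrl },
    { merge: true } // keep any other fields already on the document
  );
}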

Related

How to perform an HTTP post request using express on Cloud Functions for Firebase using busboy

Hi, I am trying to insert data into the database using a POST request, but the data is not being inserted.
On further investigation I found that busboy needs to be used to handle form-data (for image uploads) in Firebase Functions, but I am not able to find a solution for using busboy with the POST method.
If someone could please help me resolve this issue, below is the code for reference.
app.js
const express = require('express')
//const router = true;
const router = new express.Router()
const userInfo = require('../models/userProfile')
const multer = require('multer');
var fs = require('fs');
var path = require('path');
var JSONStream = require('JSONStream');
const planCheck = require('../models/planDetails');
const login = require('../models/login_creditionals');
const {Storage} = require('@google-cloud/storage');
const {format} = require('util');
const busboy = require('busboy');

const storage = new Storage({
  projectId: "biz-1",
  keyFilename: "/Users/akm/pixNodes/functions/pixbiz-65a402.json"
});
const bucket = storage.bucket("gs://biz-1");

const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 10 * 1024 * 1024
  }
})

const uploadImageToStorage = (file) => {
  return new Promise((resolve, reject) => {
    if (!file) {
      reject('No image file');
    }
    let newFileName = `${Date.now() + path.extname(file.originalname)}`;
    let fileUpload = bucket.file(newFileName);
    const blobStream = fileUpload.createWriteStream({
      metadata: {
        contentType: file.mimetype
      }
    });
    blobStream.on('error', (error) => {
      reject('Something is wrong! Unable to upload at the moment.');
    });
    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const url = format(`https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`);
      resolve(url);
    });
    blobStream.end(file.buffer);
  });
}

router.post('/userprofile/check', upload.single('profile_pic'), async (req, res) => {
  var reqFiles;
  var reqFilesUrl;
  reqFiles = req.file;
  if (reqFiles) {
    // const imagerUrl = await uploadImageToStorage(reqFiles)
    reqFilesUrl = imagerUrl;
    console.log(reqFilesUrl);
    const notify = new userInfo({
      userId: req.body.userId,
      mobile_number: req.body.mobileNumber,
      profile_pic: reqFilesUrl
    })
    try {
      console.log('success insert data');
      await notify.save((err, post) => {
        if (err) {
          console.log(err);
        }
        //console.log('data saved', post);
        res.status(201).send(post);
      });
      // });
      // res.status(201).send();
      console.log('201');
    } catch (e) {
      //res.status(401);
      return res.send(e);
    }
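On Cloud Functions for Firebase, multipart form-data is usually parsed with busboy against req.rawBody rather than with multer, because the framework has already consumed the request stream before your handler runs. A rough sketch under that assumption (busboy v1 API; the export name is made up and the profile_pic field mirrors the code above):

const functions = require('firebase-functions');
const busboy = require('busboy');

exports.userProfileCheck = functions.https.onRequest((req, res) => {
  const bb = busboy({ headers: req.headers });
  const fields = {};
  const chunks = [];

  // Collect ordinary form fields (userId, mobileNumber, ...)
  bb.on('field', (name, value) => {
    fields[name] = value;
  });

  // Collect the uploaded file (e.g. profile_pic) into a buffer
  bb.on('file', (name, file, info) => {
    file.on('data', (data) => chunks.push(data));
  });

  bb.on('close', () => {
    const fileBuffer = Buffer.concat(chunks);
    // ...upload fileBuffer to Cloud Storage and save the URL here...
    res.status(201).send({ fields, size: fileBuffer.length });
  });

  // Cloud Functions has already consumed the request stream,
  // so feed the raw body to busboy explicitly.
  bb.end(req.rawBody);
});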

Kuzzle / Minio example usage

Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
  Kuzzle,
  WebSocket
} = require('kuzzle-sdk');

var start = new Date();

const webSocketOptionsObject = {
  "autoReconnect": true,
  "ssl": true,
  "port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows path
var fileData = {};

// check file exists
fs.access(path, fs.F_OK, (err) => {
  if (err) {
    console.error(err)
    return
  }
  fileData = new fileObject.LocalFileData(path);
  // Adds a listener to detect connection problems
  kuzzle.on('networkError', error => {
    console.error('Network Error:', error);
  });
});

const connectToKuzzle = async () => {
  // Connects to the Kuzzle server
  await kuzzle.connect();
  return await kuzzle.auth.login('local', credentials);
  // console.log('jwt auth token: ', jwt);
}

const disConnectFromKuzzle = async () => {
  console.log('Disconnected from Kuzzle');
  kuzzle.disconnect();
  var time = new Date() - start;
  // sec = Math.floor((time/1000) % 60);
  console.log('Execution time in milliseconds: ', time);
}

const presignedURL = async () => {
  // Get a Presigned URL
  const result = await kuzzle.query({
    controller: 's3/upload',
    action: 'getUrl',
    uploadDir: 'proxybucket', // directory name inside the Bucket specified in the s3 plugin bucket name
    filename: fileData.name
  });
  console.log("result: ", result);
  return result;
}

const loadFileStream = async () => {
  console.log('getting file: ', path);
  targetFile = null;
  await fs.promises.readFile(path)
    .then(function (result) {
      console.log("file loaded------", result.length);
      targetFile = result;
    })
    .catch(function (error) {
      console.log(error);
      return;
    });
  return targetFile;
}

const kuzzleValidate = async (kuzzleResource) => {
  // console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
  // validate
  // Validate and persist a previously uploaded file.
  // https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
  const Presult = await kuzzle.query({
    // Kuzzle API params
    "controller": "s3/upload",
    "action": "validate",
    // File key in S3 bucket
    "fileKey": kuzzleResource.result.fileKey
  });
  console.log('validate: ', Presult.result.fileUrl);
}

const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
  // options at https://github.com/axios/axios
  const axiosOptions = {
    headers: {
      'Content-Type': fileData.type
    },
    maxBodyLength: 200000000 // 200,000,000 bytes 200 Mb
  };
  // PUT the fileBuffer to the Kuzzle S3 endpoint
  // https://github.com/axios/axios
  axios.defaults.headers.common['Authorization'] = jwt;
  const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
    .then((response) => {
      console.log('file uploaded......');
    })
    .catch(function (error) {
      console.log("File upload error: ", error);
      return;
    });
  return "Upload successful";
}

if (fileData) {
  connectToKuzzle().then((jwt) => {
    console.log(jwt);
    // upload(jwt);
    presignedURL().then((kuzzleResource) => {
      loadFileStream().then((fileBuffer) => {
        uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
          console.log("doneMessage: ", doneMessage);
        }).then(() => {
          kuzzleValidate(kuzzleResource).then(() => {
            disConnectFromKuzzle();
          });
        });
      });
    });
  });
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
You can change the endpoint configuration to use a different S3-compatible endpoint, which can be a Minio one.
This configuration can be changed under the plugins.s3.endpoint key. You should also disable the default S3 path-style option.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', false);
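In a Kuzzle v2 application, those two settings would typically be applied on the Backend instance before the server starts; a minimal sketch under that assumption (the application name and Minio URL are placeholders):

const { Backend } = require('kuzzle');

const app = new Backend('my-app'); // hypothetical application name

// Point the S3 plugin at a Minio endpoint instead of AWS
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', false);

app.start()
  .then(() => app.log.info('Kuzzle started with the Minio endpoint configured'))
  .catch(console.error);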

image resize on node.js

I want to save images after resizing them, so I used the sharp package, but the image doesn't get any smaller. I added the await sharp(uploadImage.name).resize(600, 600); line after saving to the folder. What am I missing?
const path = require('path');
const fs = require('fs');
const AdminBro = require('admin-bro');
const sharp = require('sharp');

/** @type {AdminBro.After<AdminBro.ActionResponse>} */
const after = async (response, request, context) => {
  const { record, uploadImage } = context;
  if (record.isValid() && uploadImage) {
    // console.log(uploadImage.name);
    await sharp(uploadImage.name).resize(600, 600);
    const filePath = path.join('uploads', record.id().toString(), uploadImage.name);
    await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
    await fs.promises.rename(uploadImage.path, filePath);
    await record.update({ imagePath: `/${filePath}` });
  }
  return response;
};

/** @type {AdminBro.Before} */
const before = async (request, context) => {
  if (request.method === 'post') {
    const { uploadImage, ...otherParams } = request.payload;
    // eslint-disable-next-line no-param-reassign
    context.uploadImage = uploadImage;
    return {
      ...request,
      payload: otherParams,
    };
  }
  return request;
};

module.exports = { after, before };
const filePath = path.join('uploads', record.id().toString(), uploadImage.name);
await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
await sharp(uploadImage.path).withMetadata().resize(600, 600).toFile(filePath); // with the correct paths it now saves the image, but it doesn't reduce the image size
You are not writing the processed image to a file.
Let's say this is your storage path:
const outputPath = __dirname + `/../storage/myImage.jpeg`;
You can give that path to sharp
await sharp('path/of/originalImage').withMetadata().resize(600, 600).toFile(outputPath);
And I highly recommend using withMetadata(), otherwise you lose your image's metadata.
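Note that resizing alone only changes the dimensions; if the goal is a smaller file on disk, re-encoding with an explicit quality usually helps. A rough sketch, assuming JPEG output and placeholder paths:

const sharp = require('sharp');

const inputPath = 'uploads/original.jpg';  // placeholder input path
const outputPath = 'uploads/resized.jpg';  // placeholder output path

// Resize to fit within 600x600 and re-encode as JPEG at quality 80,
// which typically shrinks the file size as well as the dimensions.
sharp(inputPath)
  .withMetadata()
  .resize(600, 600, { fit: 'inside' })
  .jpeg({ quality: 80 })
  .toFile(outputPath)
  .then(info => console.log('written', info.size, 'bytes'))
  .catch(console.error);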

Trying to upload zip file to azure storage via node js

I'm trying to upload a file to an Azure Storage blob using Node.js but I'm facing a promise issue. The problem occurs while uploading a zip file to Azure Blob Storage; I need to convert this code to a callback or .then-based function. Below is my code:
var re = /\.zip/;
fs.readdir("/tmp/", function(err, files) {
  if (err) {
    console.log("Could not list the directory.", err)
    process.exit(1)
  }
  console.log("files", files)
  var matches = files.filter(function(text) { return re.test(text) })
  console.log("These are the files you have", matches)
  var numFiles = matches.length
  console.log(numFiles);
  if (numFiles) {
    // Read in the file, convert it to base64, store to S3
    for (let i = 0; i < numFiles; i++) {
      uploadFileToBlob(matches[i])
        .then((result: any) => {
          console.log('result');
          console.log(result);
        })
        .catch((error: any) => {
          console.log(error);
        });
    }
  }
})

const uploadFileToBlob = async (file: any) => {
  const { AbortController } = require("@azure/abort-controller");
  const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");

  const account = 'string';
  const accountSas = "sastoken";
  const localFilePath = file;

  const pipeline = newPipeline(new AnonymousCredential(), {
    // httpClient: MyHTTPClient, // A customized HTTP client implementing IHttpClient interface
    retryOptions: { maxTries: 4 }, // Retry options
    userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
    keepAliveOptions: {
      // Keep alive is enabled by default, disable keep alive by setting false
      enable: false
    }
  });

  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net${accountSas}`,
    pipeline
  );

  const containerName = 'quickstart';
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const blobName = file;
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  console.log("blockBlobClient", blockBlobClient);

  try {
    const result = await blockBlobClient.uploadFile(localFilePath, {
      blockSize: 4 * 1024 * 1024, // 4MB block size
      concurrency: 20, // 20 concurrency
      onProgress: (ev: any) => console.log(ev)
    });
    console.log("uploadFile succeeds");
    return result;
  } catch (err) {
    console.log(
      `uploadFile failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
    );
    throw err;
  }
}
The output I'm getting is:
Promise { <pending> }
I tried writing the code with a callback and with .then, but I get the same result, and the same code works on my local machine.
Please change the following lines of code:
await blockBlobClient.uploadFile(localFilePath, {
  blockSize: 4 * 1024 * 1024, // 4MB block size
  concurrency: 20, // 20 concurrency
  onProgress: (ev) => console.log(ev)
});
console.log("uploadFile succeeds");
to
const result = await blockBlobClient.uploadFile(localFilePath, {
  blockSize: 4 * 1024 * 1024, // 4MB block size
  concurrency: 20, // 20 concurrency
  onProgress: (ev) => console.log(ev)
});
console.log("uploadFile succeeds");
return result;
And then call your uploadFileToBlob method like:
uploadFileToBlob('filename.zip')
  .then((result) => {
    console.log('result');
    console.log(result);
  })
  .catch((error) => {
    console.log(error);
  });
UPDATE
Here's the complete code I used. I was able to upload the file successfully:
const uploadFileToBlob = async (file) => {
  const { AbortController } = require("@azure/abort-controller");
  const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");

  const account = 'account-name';
  const accountSas = '?sv=2020-04-08&ss=b&srt=sco&se=2021-03-08T18%3A30%3A00Z&sp=rwdxftlacup&sig=pT1d1NJQdu3bcnPA37voTCyQ6jKnmNVYgo1wyiYspDc%3D';
  const localFilePath = file;

  const pipeline = newPipeline(new AnonymousCredential(), {
    // httpClient: MyHTTPClient, // A customized HTTP client implementing IHttpClient interface
    retryOptions: { maxTries: 4 }, // Retry options
    userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
    keepAliveOptions: {
      // Keep alive is enabled by default, disable keep alive by setting false
      enable: false
    }
  });

  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net${accountSas}`,
    pipeline
  );

  const containerName = 'container-name';
  const containerClient = blobServiceClient.getContainerClient(containerName);
  const blobName = file;
  const blockBlobClient = containerClient.getBlockBlobClient(blobName);
  console.log("blockBlobClient", blockBlobClient);

  try {
    const result = await blockBlobClient.uploadFile(localFilePath, {
      blockSize: 4 * 1024 * 1024, // 4MB block size
      concurrency: 20, // 20 concurrency
      onProgress: (ev) => console.log(ev)
    });
    console.log("uploadFile succeeds");
    return result;
  } catch (err) {
    console.log(
      `uploadFile failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
    );
    throw err;
  }
}

uploadFileToBlob('filename.zip')
  .then((result) => {
    console.log('result');
    console.log(result);
  })
  .catch((error) => {
    console.log(error);
  });
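If several matching zip files need to be uploaded from the directory listing, the per-file .then calls from the original loop can also be gathered with Promise.all so the caller knows when every upload has finished. A small sketch, assuming the same uploadFileToBlob function and the same /tmp/ listing as above:

const fs = require('fs');

fs.promises.readdir('/tmp/')
  .then((files) => {
    const matches = files.filter((name) => /\.zip$/.test(name));
    // Start all uploads and wait for every one of them to complete
    return Promise.all(matches.map((name) => uploadFileToBlob(name)));
  })
  .then((results) => {
    console.log('all uploads finished:', results.length);
  })
  .catch((error) => {
    console.log(error);
  });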

GridFSBucket Errors

When I try to use GridFSBucket from mongodb, chunks are created in the fs.chunks collection, but no fs.files documents are created. Is there a reason why this may be happening, or is this a bug?
This works on my local machine and in Docker on my local machine, but not in Docker on an AWS EC2 instance.
const { MongoClient, GridFSBucket } = require('mongodb');
const crypto = require('crypto');
const dotenv = require('dotenv');
dotenv.config();

const url = process.env.MONGO_URI;
const dbName = 'tree_club';
const opts = { useUnifiedTopology: true };

const getPosts = async () => {
  const client = await MongoClient.connect(url, opts);
  const db = client.db(dbName);
  const posts = await db.collection('post').find({}).sort({ created: -1 }).toArray();
  client.close();
  return posts;
};

const createPost = async (parent, { markdown, file }) => {
  const client = await MongoClient.connect(url, opts);
  const db = client.db(dbName);
  const bucket = new GridFSBucket(db);
  const fileId = file ? crypto.randomBytes(64).toString('hex') : undefined;
  const { ops: [post] } = await db.collection('post').insertOne({
    markdown,
    fileId,
    created: Date.now()
  });
  if (file) {
    await new Promise(async (resolve, reject) => {
      const { createReadStream } = await file;
      const readStream = createReadStream();
      const writeStream = bucket.openUploadStreamWithId(fileId, fileId);
      readStream.pipe(writeStream);
      readStream.on('error', () => {
        console.log('error');
      });
      readStream.on('finish', () => {
        resolve();
        client.close();
      });
    });
  } else {
    client.close();
  }
  return post;
};

const pipeImage = async (req, res) => {
  const client = await MongoClient.connect(url, opts);
  const db = client.db(dbName);
  const bucket = new GridFSBucket(db);
  try {
    const readStream = bucket.openDownloadStream(req.params.fileId);
    readStream.pipe(res);
    readStream.on('finish', () => {
      client.close();
    });
  } catch (err) {
    res.status(400).json('Image was not found');
  }
};

module.exports = { getPosts, createPost, pipeImage };
I searched for the answer to this question, and it turns out there is an issue with streams in Node.js version 13.
I downgraded my Node.js version to 12 and the code above worked perfectly.
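Independently of the Node version, readable streams emit 'end' rather than 'finish', so resolving on the GridFS upload stream's 'finish' event is usually a safer signal that the fs.files document has been written. A rough sketch of that part of createPost, under that assumption:

// Wait for the GridFS write stream to finish, not the read stream:
// 'finish' on the upload stream fires once the file document has been created.
await new Promise((resolve, reject) => {
  const readStream = createReadStream();
  const writeStream = bucket.openUploadStreamWithId(fileId, fileId);
  readStream.on('error', reject);
  writeStream.on('error', reject);
  writeStream.on('finish', resolve);
  readStream.pipe(writeStream);
});
client.close();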
