Renamed picture in Firebase not showing after opening url - node.js

I am trying to implement an upload profile image feature for a users collection. I implement an async function as such
exports.async_upload_image = async function(req, res) {
const path = require("path");
const os = require("os");
const fs = require("fs");
const { Storage } = require('#google-cloud/storage');// end up no need
let gcs = new Storage({
projectId: config.projectId
});
try {
const newFilePath = req.filepath;
const baseName = newFilePath.split("/").pop();
if (req.mimetype !== "image/jpeg" && req.mimetype !== "image/png") {
console.log("Wrong file type submitted");
return null;
}
// my.image.png => ['my', 'image', 'png']
const image_extension = newFilePath.split(".")[newFilePath.split(".").length - 1];
// 32756238461724837.png
let generated_token = uuid();
let image_filename = `${generated_token}.${image_extension}`;
const processed_path = path.join(os.tmpdir(), image_filename);
//creates a copy of image file inside the temporary path
const input_file = fs.createReadStream(newFilePath);
const output_file = fs.createWriteStream(processed_path);
input_file.pipe(output_file);
//upload to the firebase storage from the temporary path
await gcs.bucket(config.storageBucket).upload(processed_path, {
gzip: true,
metadata: {
cacheControl: "no-cache",
contentType: req.mimetype,
firebaseStorageDownloadTokens: generated_token
}
})
const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${image_filename}?alt=media&token=${generated_token}`;
// await gcs.bucket(config.storageBucket).upload(newFilePath, {
// gzip: true,
// metadata: {
// cacheControl: "no-cache",
// contentType: req.mimetype,
// firebaseStorageDownloadTokens: generated_token
// }
// })
// const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${baseName}?alt=media&token=${generated_token}`;
await db.collection(USERS_PUBLIC_COLLECTION).doc(req.user_id).update({
profile_image:
{
uid: generated_token,
url: imageUrl
}
})
console.log(`Update profile to uploaded image ${generated_token} successfully`);
return success_response();
} catch (error) {
console.log(error);
}
}
Then I wrote the following at the bottom of the same file and ran it with node file.js.
// Ad-hoc local harness (run with `node file.js`): fakes the `req` object the
// export expects. `some_file_path` is not defined in this snippet — it must
// be replaced with a real path to a jpeg/png before running.
const req = {
filepath: some_file_path,
mimetype: "image/png",
user_id: "valid_user_id"
}
// NOTE(review): the returned promise is ignored; the function catches its own
// errors, but a .catch() here would still be safer.
exports.async_upload_image(req);
The picture does get uploaded to Storage, and my document in Firestore is updated, but the intention was that opening the link stored under url in the profile_image map would show the picture. This works for the unprocessed picture (whose code segment is commented out), but not for the processed image. I also noticed that the uploaded file size is incredibly small — around 20 bytes. Can someone tell me why this happens, and what a better way to upload images with Firebase might be? Feel free to ask for clarification if more info is required to solve the problem.

Related

node js express file upload is corrupted

Uploaded files are the same size 16 bytes and corrupted. What am I doing wrong?
It looks like the server code is working as it should, so this is most likely a client-side issue. But I could be wrong, so I have included everything.
server side code:
import path from 'path'
import fs from 'fs'
import { fileURLToPath } from 'url'
// Express controller that streams stored files back to authenticated clients.
class authController {
  // GET handler. Expects query params: parentFolder, folderId, fileName.
  // Responds with the file as an attachment, 400 on a path-escape attempt,
  // or 500 on unexpected errors.
  downloadFile(req, res) {
    console.log(req.query)
    try {
      const parentFolder = req.query.parentFolder
      const folderId = req.query.folderId
      const fileName = req.query.fileName
      const __filename = fileURLToPath(import.meta.url)
      const __dirname = path.dirname(__filename)
      const baseDir = path.resolve(__dirname, '../files')
      const file = path.join(baseDir, `${parentFolder}/${folderId}/${fileName}`)
      // SECURITY FIX: the query values are attacker-controlled; without this
      // check, fileName = '../../../etc/passwd' (etc.) escapes the files root.
      if (!path.resolve(file).startsWith(baseDir + path.sep)) {
        return res.status(400).json({ message: 'Invalid file path' })
      }
      res.download(file, fileName)
    } catch (e) {
      console.log(e)
      res.status(500).json({ message: 'Something went wrong, please try again' })
    }
  }
}
export default new authController()
client side code:
// Click handler: downloads the clicked file via the authenticated API and
// triggers a browser "save" through a temporary <a download> link.
onClick={async function (e) {
e.stopPropagation()
const request = 'downloadFile'
const response = await fetch(`/api/auth/${request}?parentFolder=${parentFolder}&folderId=${folderId}&fileName=${item}`, {
headers: {
Authorization: 'Bearer ' + auth.token,
},
})
if (response.status === 200) {
console.log(response)
// BUG: response.blob() returns a Promise — without `await`, `blob` is a
// Promise object, so the Blob below wraps its string form instead of the
// file bytes (hence the identical 16-byte corrupted downloads). The fix
// described underneath this snippet adds the missing await.
const blob = response.blob()
const downloadUrl = window.URL.createObjectURL(new Blob([blob], { type: 'image/png' }))
const link = document.createElement('a')
link.href = downloadUrl
link.download = item
document.body.appendChild(link)
link.click()
link.remove()
}
}}
I managed to solve the problem. Replaced
const blob = response.blob()
with const blob = await response.blob() and everything worked.

I would like to send multiple images to Amazon S3. This is my code so far, which sends just one image.

I'm using TYPESCRIPT and NODEJS. In addition to sending the results to the database in POSTGRESSQL.
ROUTER.TS
// POST /image?product_id=... — authenticated multipart upload of one image
// ("file" field): stores it in S3 via UploadImagesService, then records the
// public URL in Postgres through Prisma and echoes the created row.
router.post(
  "/image",
  isAuthenticated,
  upload.single("file"),
  async (req, res) => {
    const { file } = req;
    // BUG FIX: multer leaves req.file undefined when the request has no
    // "file" part; previously that crashed below with a TypeError.
    if (!file) {
      return res.status(400).json({ error: "file is required" });
    }
    const product_id = req.query.product_id as string;
    const uploadImagesService = new UploadImagesService();
    // Push the stored upload to S3 before recording its public URL.
    await uploadImagesService.execute(file);
    const createImage = await prismaClient.uploadImage.create({
      data: {
        url: `https://upload-joias.s3.amazonaws.com/${file.filename}`,
        id: file.filename,
        product_id: product_id,
      },
    });
    return res.send(createImage);
  }
);
SERVICE.TS
import S3Storage from "../../utils/S3Storage";
class UploadImagesService {
async execute(file: Express.Multer.File): Promise<void> {
const s3Storage = new S3Storage();
await s3Storage.saveFile(file.filename);
}
}
export { UploadImagesService };
S3Storage.ts
// Uploads the named file (stored locally under uploadConfig.diretory) to the
// "upload-joias" bucket, then deletes the local copy.
// Throws "File not found" when the extension yields no MIME type.
async saveFile(filename: string): Promise<void> {
  const originalPath = path.resolve(uploadConfig.diretory, filename);
  const contentType = mime.getType(originalPath);
  if (!contentType) {
    throw new Error("File not found");
  }
  const fileContent = await fs.promises.readFile(originalPath);
  // BUG FIX: putObject(...).promise() was never awaited, so the local file
  // was unlinked (and the request finished) before the upload completed,
  // and any S3 error surfaced only as an unhandled promise rejection.
  await this.client
    .putObject({
      Bucket: "upload-joias",
      Key: filename,
      ACL: "public-read",
      Body: fileContent,
      ContentType: contentType,
    })
    .promise();
  await fs.promises.unlink(originalPath);
}
I'm having a hard time dealing with this since I'm new to Node.js and TypeScript. I'm grateful for any help.

Upload image to s3 bucket - react native and node js

Within my app a user can select a profile image and i would like that image to be uploaded to an s3 bucket when the user saves their profile data
I pass the image data (and json, which consists of name, email, telephone for example) from my app to an express server and upload there
At present I can pass the image data (the url it seems at present) to an s3 bucket and it saves
I don't think i'm actually saving the image itself though, as when downloading from s3 (manually) and trying to open on my mac it states it may be damaged and i cannot see the image
Feel daft for asking but how do i actually upload the image itself? Thanks
React Native Side
// Serialises the form and, when a profile image was chosen, wraps both the
// JSON payload and the image in multipart FormData before sending.
const handleFormSubmit = formData => {
  const jsonData = JSON.stringify({ ...formData });
  // No image selected: send the plain JSON body.
  if (!(imageProps && imageProps.uri)) {
    sendRequest(jsonData);
    return;
  }
  // iOS file URIs need the file:// scheme stripped; Android URIs are used as-is.
  const uri =
    Platform.OS === 'android'
      ? imageProps.uri
      : imageProps.uri.replace('file://', '');
  const data = new FormData();
  data.append('formBody', jsonData);
  data.append('image', {
    uri,
    type: imageProps.type,
    name: imageProps.fileName,
  });
  sendRequest(data);
};
// POSTs either a JSON string or FormData (JSON + image) to the profile
// endpoint; logs the parsed JSON response or the error.
const sendRequest = data => {
  let responseData;
  const headers = { Accept: 'application/json' };
  // BUG FIX: the original always set 'Content-Type: application/json', even
  // for FormData. fetch must be allowed to set multipart/form-data with its
  // own boundary, otherwise the server cannot parse the image part — which
  // is why only a mangled value (not the image bytes) ever arrived.
  if (!(data instanceof FormData)) {
    headers['Content-Type'] = 'application/json';
  }
  fetch('http://localhost:8080/users/api/update_user_profile', {
    method: 'POST',
    headers,
    body: data,
  })
    .then(response => {
      responseData = response;
      return response.json();
    })
    .then(jsonData => {
      console.log(jsonData)
    })
    .catch(error => {
      console.log(error)
    });
};
Server Side
const fs = require('fs');
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});
// Setting up S3 upload parameters.
// BUG FIX: Body was `req.files.image.path` — a plain string — so S3 stored
// the literal path text instead of the image bytes, which is exactly why the
// downloaded "image" appeared damaged. Stream the real file contents instead.
const params = {
  Bucket: 'bucket-folder',
  ACL: 'public-read',
  Key: req.files.image.name,
  Body: fs.createReadStream(req.files.image.path)
};
const stored = await s3.upload(params).promise();
You can use Multer for uploading files to s3.
const multer = require('multer');
const AWS = require('aws-sdk');
const uniqid = require('uniqid');
// memoryStorage keeps each upload in RAM and exposes it as file.buffer —
// fine for small files; prefer disk/stream storage for large uploads.
const storage = multer.memoryStorage();
const upload = multer({ storage });
// ? Posts new file to amazon and saves to db
// POST /:id — receives one multipart file ("attachment"), uploads it to S3
// under <bucket>/<filePath>/<id>/<unique>-<originalname>, then records the
// resulting link and metadata via postFile(). Responds 500 on either the S3
// or the DB step failing, 200 with postFile's result otherwise.
router.post(
'/:id',
upload.single('attachment'),
async (req, res) => {
// Time-based unique prefix so same-named files don't overwrite each other.
const unique = uniqid.time();
// NOTE(review): if the request has no "attachment" part, `file` is
// undefined and the Key template below throws — consider guarding.
const { file } = req;
const { filePath } = req.body;
const { id } = req.params;
const s3FileURL = process.env.AWS_UPLOADED_FILE_URL;
const region = process.env.AWS_REGION;
const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
// Appending '/' + filePath to the bucket name makes S3 treat filePath as a
// key prefix inside the bucket.
const Bucket = process.env.AWS_BUCKET_NAME + '/' + filePath;
const Key = `${id}/${unique}-${file.originalname}`;
// multer memoryStorage exposes the raw bytes on file.buffer.
const Body = file.buffer;
const ContentType = file.mimetype;
const ACL = 'public-read';
const s3bucket = new AWS.S3({
accessKeyId,
secretAccessKey,
region,
});
const params = {
Bucket,
Key,
Body,
ContentType,
ACL,
};
// Callback-style upload; the async callback lets us await the DB write.
s3bucket.upload(params, async (err, data) => {
if (err) {
res.status(500).json({ error: true, Message: err });
} else {
console.log(params);
const newFileUploaded = {
description: req.body.description,
// Public link mirrors the Bucket-prefix + Key layout used above.
fileLink: `${s3FileURL}${filePath}/${id}/${unique}-${file.originalname}`,
s3_key: params.Key,
};
try {
const response = await postFile({
name: req.body.name,
attachment: newFileUploaded,
alt: req.body.alt,
user: req.body.user,
relatedID: req.body.relatedID,
});
res.status(200).json({
message: response.message,
success: response.success,
result: response.result,
});
} catch (e) {
// File is already in S3 at this point; only the DB record failed.
res.status(500).json({
message:
'File upoladed but Db couldnt saved request (upload by ID)',
success: false,
result: [],
});
}
}
});
}
);

Download image from URL, save to Firebase Store, and serve image URL

I'm having a hard time finding example code for this, and I'm having trouble getting it to work myself. I want to create a Firebase Function that fetches an image
URL (this URL has a token associated with it from Mapbox). Then I want to write the image to Firebase Storage so I can reference it from there later to serve it. So far I've figured out how to fetch the image, but saving it to Storage seems to be too much for me! Help!
exports.getPolylineFromSessionId = functions.https.onRequest(async (req, res) => {
const sessionId = req.query.id;
if (sessionId) {
const sessionInfo = await db
.collection('sessions')
.doc(sessionId)
.get();
const session = sessionInfo.data();
const results = encodeURIComponent(polyline.encode(convertGpsTrack(session.gpsTrack)));
const url =
'https://api.mapbox.com/styles/v1/mapbox/satellite-v9/static/path-1+fff-0.5(' + results + ')/auto/700x200?access_token=' + mapboxToken;
const bucket = admin
.storage()
.bucket('gs://mybucket.appspot.com')
.file('thumbnails/' + sessionId + '.jpg');
const res = await fetch(url, {
method: 'GET',
headers: {
'Content-Type': 'image/jpeg',
},
});
const blob = await res.blob();
bucket.save(blob, {
metadata: {
contentType: 'image/jpeg',
},
});
res.status(200).send(session);
} else {
res.status(400).send('sessionId required');
}
});
I was able to figure it out finally! Below is the code I used!
// Target object for the rendered thumbnail.
var file = admin
  .storage()
  .bucket('gs://mybucket.appspot.com')
  .file('thumbnails/' + sessionId + '.jpg');
// encoding: null makes `request` hand back the raw bytes as a Buffer.
request({ url: url, encoding: null }, function(err, response, buffer) {
  // BUG FIX: the original ignored `err`; a failed fetch would previously
  // write an undefined buffer (or crash reading response.headers).
  if (err) {
    console.error(err);
    return;
  }
  var stream = file.createWriteStream({
    metadata: {
      contentType: response.headers['content-type'],
    },
  });
  // Surface upload failures instead of crashing on an unhandled 'error'.
  stream.on('error', console.error);
  stream.end(buffer);
});

How to get downloadUrl firebase storage with firebase admin sdk and put in img html's tag in nodejs with multer?

I am building the image upload system. I want to use the Firebase Admin sdk for handling this process. How to get the url to store in my Google Cloud Storage and fetch them on display in img tag (html) when the uploading is completed.
try {
// req.file comes from multer; with memoryStorage the bytes are on file.buffer.
const file = req.file;
const bucket = admin.storage().bucket();
// NOTE(review): Buffer.from(buf, "base64") ignores the encoding when the
// first argument is already a Buffer — this just copies the bytes — and the
// Uint8Array wrapper is likewise redundant; saving file.buffer directly
// would presumably behave the same. Confirm before simplifying.
const imageBuffer = Buffer.from(file.buffer, "base64");
const imageByteArray = new Uint8Array(imageBuffer);
const options = {
resumable: false,
metadata: { contentType: file.mimetype },
predefinedAcl: "publicRead",
public: true,
};
// Object key: img/<original client filename>; same-named uploads overwrite.
const files = bucket.file(`img/${file.originalname}`);
await files.save(imageByteArray, options);
// NOTE(review): getMetadata() appears to resolve to [metadata, apiResponse];
// for a public object, metadata.mediaLink should be a usable URL — verify.
const field = await files.getMetadata();
console.log(field);
} catch (e) {
// NOTE(review): error is only logged; no HTTP error response is sent here.
console.error(e);
}
With the Node.js Admin SDK you should do as follows, using the getSignedUrl() method:
const { initializeApp, cert } = require('firebase-admin/app');
const { getStorage } = require('firebase-admin/storage');
const serviceAccount = require('./path/to/serviceAccountKey.json');
// Initialise the Admin SDK with an explicit service account and a default
// bucket, so getStorage().bucket() below resolves without arguments.
initializeApp({
credential: cert(serviceAccount),
storageBucket: '<BUCKET_NAME>.appspot.com'
});
// ...
// Returns a long-lived signed read URL for `fileName` in the default bucket.
// (Equivalently, the bucket could come from admin.storage().bucket(),
// depending on where and how the code executes.)
async function getSignedURL(fileName) {
  const options = { action: 'read', expires: '01-01-2030' };
  // getSignedUrl resolves to an array whose first entry is the URL; see the
  // SDK documentation (link above) for the full set of config options.
  const [signedURL] = await getStorage()
    .bucket()
    .file(fileName)
    .getSignedUrl(options);
  return signedURL;
}
// Use the above function to save the signed URL to Firestore
const fileName = "...";
getSignedURL(fileName)
  .then(signedURL => {
    // Return the write so its failure propagates into the chain.
    return admin.firestore().collection("urls_collection")
      .add({url: signedURL, foo: "bar", bar: "foo"});
  })
  // BUG FIX: the original chain had no rejection handler, so a signing or
  // Firestore failure became an unhandled promise rejection.
  .catch(console.error);

Resources