Building an app user profile photo component. I set up a Google Cloud Storage bucket without public access, using fine-grained ACLs, and a service account with the Storage Admin role. I downloaded the JSON key, placed it in the root directory alongside package.json, and referenced it in my file upload/download controller:
const processFile = require("../middleware/upload");
const { format } = require("util");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({ keyFilename: "my-service-account.json" });
const bucket = storage.bucket("my-gcp-storage-bucket");

const upload = async (req, res) => {
  try {
    await processFile(req, res);

    if (!req.file) {
      return res.status(400).send({ message: "Please upload a file!" });
    }

    const blob = bucket.file(req.file.originalname);
    const blobStream = blob.createWriteStream({
      resumable: false,
    });

    blobStream.on("error", (err) => {
      res.status(500).send({ message: err.message });
    });

    blobStream.on("finish", async () => {
      const publicUrl = format(
        `https://storage.googleapis.com/${bucket.name}/${blob.name}`
      );

      try {
        await bucket.file(req.file.originalname).makePublic();
      } catch {
        return res.status(500).send({
          message: `Uploaded the file successfully: ${req.file.originalname}, but public access is denied!`,
          url: publicUrl,
        });
      }

      res.status(200).send({
        message: "Uploaded the file successfully: " + req.file.originalname,
        url: publicUrl,
      });
    });

    blobStream.end(req.file.buffer);
  } catch (err) {
    console.log(err);

    if (err.code == "LIMIT_FILE_SIZE") {
      return res.status(500).send({
        message: "File size cannot be larger than 2MB!",
      });
    }

    res.status(500).send({
      message: `Could not upload the file: ${req.file.originalname}. ${err}`,
    });
  }
};
const getListFiles = async (req, res) => {
  try {
    const [files] = await bucket.getFiles();
    let fileInfos = [];

    files.forEach((file) => {
      fileInfos.push({
        name: file.name,
        url: file.metadata.mediaLink,
      });
    });

    res.status(200).send(fileInfos);
  } catch (err) {
    console.log(err);
    res.status(500).send({
      message: "Unable to read list of files!",
    });
  }
};
const download = async (req, res) => {
  try {
    const [metaData] = await bucket.file(req.params.name).getMetadata();
    res.redirect(metaData.mediaLink);
  } catch (err) {
    res.status(500).send({
      message: "Could not download the file. " + err,
    });
  }
};

module.exports = {
  upload,
  getListFiles,
  download,
};
I am able to upload to my bucket and list files, but I cannot download or see image previews in my React frontend. I get this message when trying to use the download API:
Anonymous caller does not have storage.objects.get access to the Google Cloud Storage object. Permission 'storage.objects.get' denied on resource (or it may not exist).
Specifically, it is the getListFiles and download functions above that do not work.
I can't figure out what's wrong here, as I can upload and list files - just not download. Any ideas?
Make sure that the service account has the necessary permissions to download files. Refer to document1 & document2 for more information.
The authenticated user must have the storage.objects.get IAM permission to use this method. To return object ACLs, the authenticated user must also have the storage.objects.getIamPolicy permission.
Also initialize the gcloud environment with gcloud init, then set up Application Default Credentials by running gcloud auth application-default login.
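Note that the download handler above redirects the browser to metadata.mediaLink, so the request to Cloud Storage is made by the browser without credentials; that is why the error mentions an anonymous caller. A minimal sketch of a download handler that instead streams the object through the server using the authenticated client (it assumes the same bucket object as in the code above):

const download = async (req, res) => {
  try {
    const file = bucket.file(req.params.name);
    res.setHeader(
      "Content-Disposition",
      `attachment; filename="${req.params.name}"`
    );
    // The read stream uses the service account's credentials, so the
    // browser never needs direct access to the private bucket.
    file
      .createReadStream()
      .on("error", (err) => {
        res.status(500).send({ message: "Could not download the file. " + err });
      })
      .pipe(res);
  } catch (err) {
    res.status(500).send({ message: "Could not download the file. " + err });
  }
};

Alternatively, if the frontend should fetch objects directly while the bucket stays private, you can hand it a time-limited signed URL from file.getSignedUrl({ version: "v4", action: "read", expires: Date.now() + 15 * 60 * 1000 }).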
Related
Upload to google cloud storage using node.js doesn't have error but uploads zero bytes
I am using Google Cloud Storage. This code will save an object in the bucket, but it is just empty; it shows a size of 0 bytes:
// Requires used by this snippet
const path = require("path");
const { format } = require("util");
const { Storage } = require("@google-cloud/storage");

const gc = new Storage({
  keyFilename: path.join(__dirname, "../faslearn-ba6c817cfcea.json"),
  projectId: "faslearn"
});
//const gc = new Storage()
//gc.getBuckets().then(x => console.log(x))
const bucket = gc.bucket('faslearn_files')

const upload = async (req, res) => {
  try {
    if (!req.file) {
      res.status(400).send('No file uploaded.');
      return;
    }
    // Create a new blob in the bucket and upload the file data.
    const blob = bucket.file(req.file.originalname);
    const blobStream = blob.createWriteStream({ resumable: false, gzip: true });

    blobStream.on('error', err => {
      // `next` is not in scope in this handler, so report the error directly
      res.status(500).send({ message: err.message });
    });

    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const publicUrl = format(
        `https://storage.googleapis.com/${bucket.name}/${blob.name}`
      );
      res.status(200).send(publicUrl);
    });

    blobStream.end(req.file.buffer);
  } catch (err) {
    console.log(" some where");
    res.status(500).send({
      message: `Could not upload the file: ${req.file.originalname}. ${err}`,
    });
  }
};
I'm having trouble saving an incoming webm blob to the server. I'm using react-video-recorder on NextJS like this:
<VideoRecorder
  onRecordingComplete={(videoBlob) => {
    // Do something with the video...
    fetch("/api/video", {
      method: "post",
      body: videoBlob,
    })
      .then(function (response) {
        console.log("done");
        return response;
      })
      .catch(function (err) {
        console.log('error', err);
      });
    console.log(url);
    // output: blob:http://localhost:3000/99a5b711-f3d5-481d-9e04-8981d1be3727
    console.log(videoBlob);
    // output: Blob {size: 307028, type: "video/webm;codecs="vp8,opus""}
  }}
/>
On the API side I'm trying to save the file like this. It does save something, but it is only the first chunk or buffer. How can I capture and write the whole file to my server?
import fs from 'fs';

export default async (req, res) => {
  fs.writeFile('test.webm', req.body, function (err) {
    if (err) return console.log(err);
    console.log('video saved');
  });
};
I did that task by doing this:
I saved the recorded chunks/video blob to state and then sent it to the Node.js server from the React frontend.
Frontend code:
const blob = new Blob(context.data.recordedChunks, {
  type: "video/webm",
});

const fd = new FormData()
fd.append('video', blob)

axios.post(`${process.env.REACT_APP_BASE_URL}/video/blob_video`, fd)
  .then((res) => console.log(res.data))
  .catch((err) => console.log(err))
Backend code:
router.post('/blob_video', async (req, res) => {
  try {
    if (req.files.video !== undefined) {
      const video = req.files.video // this is your file, do whatever you want with it
      const videoname = Date.now() + req.files.video.name + ".webm"
      video.mv(`${__dirname}/../your_path/${videoname}`, err => {
        if (err) {
          console.log(err)
          return res.json({ err })
        }
      })
    }
    res.json(req.body)
  } catch (err) {
    res.json({ success: false, err: err, msg: "Server error" })
    console.log(err)
  }
})
This uses express-fileupload to handle the upload; you can do the same with your favourite middleware.
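For req.files to be populated as in the route above, the express-fileupload middleware has to be registered on the app. A minimal sketch, assuming a standard Express setup (the /video mount path is an assumption to match the axios URL above):

const express = require('express')
const fileUpload = require('express-fileupload')

const app = express()
// Parses multipart/form-data bodies and populates req.files
app.use(fileUpload())
// Mount the router that defines the /blob_video route shown above
app.use('/video', router)

app.listen(5000)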
While working with the Google Drive API v3, I am facing an issue:
If I make a call to retrieve the file list, in the response I can see, among others:
{ id: '1XYlwukNmzUrHRCh05pb9OeD1nnZdDjJU', name: 'file5.zip' },
So now I am using the fileId from the response above to try to delete the file:
const deleteFileById = (fileId) => {
  console.log(`File id is ${fileId}`);
  const drive = google.drive({ version: 'v3', authorization });
  return new Promise((resolve, reject) => {
    try {
      drive.files.delete({
        auth: authorization,
        fileId: fileId,
      }, (err, res) => {
        if (err) return console.log('The API returned an error: ' + err);
        resolve("File has been deleted");
      });
    } catch (error) {
      console.log(error);
    }
  });
}
And getting as a response:
The API returned an error: Error: File not found: 1XYlwukNmzUrHRCh05pb9OeD1nnZdDjJU.
So, at this point, I would say that is weird...
This is the code I used. It works, but it sometimes runs a little fast. You don't need to add authorization to the call to the API; it's already part of the service.
async function deleteFiles(auth) {
  const driveService = google.drive({version: 'v3', auth});

  let response = await driveService.files.list({
    q: "parents in '10krlloIS2i_2u_ewkdv3_1NqcpmWSL1w'",
    fields: 'files(id)'
  })

  const files = response.data.files;
  if (files.length) {
    console.log('Files:');
    files.map((file) => {
      console.log(`${file.id}`);
      driveService.files.delete({
        "fileId": file.id
      })
        .then(() => console.log('FileDeleted'))
        .catch((err) => console.error('failed' + err));
    });
  } else {
    console.log('No files found.');
  }
}
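The "runs a little fast" behaviour is likely because files.map fires all of the delete calls without waiting for them, so deleteFiles can return before the deletions have finished. A sketch of the same loop that awaits every deletion (same driveService and files as above):

await Promise.all(
  files.map((file) =>
    driveService.files.delete({ fileId: file.id })
      .then(() => console.log(`deleted ${file.id}`))
      .catch((err) => console.error('failed ' + err))
  )
);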
This is my Node.js backend code for uploading a file and saving it to the database. It works fine locally, but after deploying to AWS it doesn't work; check this error image.
For more info: I have changed environment.prod. There is no problem with other requests, though.
exports.create = (req, res) => {
  try {
    if (!req.files) {
      res.status(400).send({ message: "Please select image" });
    } else {
      const file = req.files.image;
      const filename = req.files.image.name;
      file.mv(__dirname + '/uploads/' + filename, function (err, success) {
        if (err) {
          // 600 is not a valid HTTP status code; use 500
          res.status(500).send({ err });
        } else {
          const url = req.protocol + '://' + req.get("host");
          const tutorial = new Tutorial({
            data: req.body.data,
            image: url + "/uploads/" + filename,
            creator: req.userData.userId,
            uploaderName: req.body.name,
            createdDateTime: new Date()
          });
          // Save the post in the database
          tutorial.save().then(data => {
            res.status(200).send({
              message: "success"
            });
          }).catch(err => {
            res.status(500).send({
              message: err
            });
          });
        }
      })
    }
  } catch (e) {
    // 700 is not a valid HTTP status code; use 500
    res.status(500).send({ error: e });
  }
};
Can you please tell me what I am doing wrong when trying to upload an image to Cloudinary?
app.js
I don't need to store images on the server, so I store them in memory.
var cloudinary = require('cloudinary');
cloudinary.config({
  cloud_name: 'hidden',
  api_key: 'hidden',
  api_secret: 'hidden'
});

var multer = require('multer');
var storage = multer.memoryStorage()
var upload = multer({ storage: storage })
Site form (jade/pug)
form(action="/?_csrf="+csrfToken method="post" enctype='multipart/form-data')
  input(type="file" name="avatar")
  input(type="submit" value="upload")
App post
app.post('/', upload.single('avatar'), function(req, res, next){
  console.log('Should be undefined:', req.file.path); //yes
  console.log('Should be the buffer:', req.file.buffer); //yes
  cloudinary.uploader.upload(req.file.path, function(result) { console.log(result) });
});
and I get this error:
{ error: { message: 'Missing required parameter - file', http_code: 400 } }
I found out how (just use Datauri):
// Requires used by this snippet
var Datauri = require('datauri');
var path = require('path');

var dUri = new Datauri();
dUri.format(path.extname('TEST').toString(), req.file.buffer);

cloudinary.uploader.upload(dUri.content, function (err, result) {
  if (err) {
    console.log(err);
  } else {
    console.log(result);
  }
});
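An alternative that skips the data-URI step: the Cloudinary Node SDK also exposes upload_stream, so the multer memory buffer can be piped straight in. A sketch, assuming the streamifier package to wrap the buffer in a readable stream:

var streamifier = require('streamifier');

app.post('/', upload.single('avatar'), function (req, res) {
  // upload_stream returns a writable stream; the callback fires once
  // Cloudinary has received the whole file
  var uploadStream = cloudinary.v2.uploader.upload_stream(function (err, result) {
    if (err) return res.status(500).send(err);
    res.send(result.url);
  });
  streamifier.createReadStream(req.file.buffer).pipe(uploadStream);
});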
I wasn't able to upload directly from my form to the server, but I used a trick: first I store the files on disk, then upload them.
I use Heroku hosting, which means my files are deleted after 30 minutes, so I will not have any storage problem.
// #1 collect the data into storage: ./image/filename
await file.mv('./image/' + filename, async (err) => {
  if (err) {
    console.log("server '/upload': failed to upload, error => " + err)
    res.send('Save files => error: ' + err)
  }
  else {
    try {
      const client = await pool.connect()
      //await client.query(`INSERT INTO test_table(id, name) VALUES(${1},'${"test"}')`)
      const result = await client.query(`INSERT INTO post(musicname, artistname, price, music, picture)
        VALUES ('${textName}','${textArtist}','${textPrice}', '${musicname}','${filename}')`);
      res.send("server '/upload': inserting new data is done.")
      console.log("server '/upload': inserting new data is done.")
      client.release();
    } catch (err) {
      console.error(err);
      res.send("Error " + err);
    }
  }
})

await fileMusic.mv('./music/' + musicname, (err) => {
  if (err) {
    console.log(err)
    res.send('save files => error')
  }
})

// #2 upload the collected data to Cloudinary
await cloudinary.v2.uploader.upload('./image/' + filename, { public_id: `${filename}` },
  function (error, result) {
    console.log(result.url, error)
  });
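One caveat with the snippet above: interpolating textName and the other values straight into the SQL string is open to SQL injection. The pg client supports parameterized queries, so the same INSERT can pass the values separately; a sketch with the same columns:

const result = await client.query(
  `INSERT INTO post(musicname, artistname, price, music, picture)
   VALUES ($1, $2, $3, $4, $5)`,
  [textName, textArtist, textPrice, musicname, filename]
);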