Google Drive Api v3 "File not found" - node.js

During my work with the Google Drive API v3, I am facing an issue:
If I make a call to retrieve the file list, I can see in the response, among others:
{ id: '1XYlwukNmzUrHRCh05pb9OeD1nnZdDjJU', name: 'file5.zip' },
So now I am using the fileId from the response above to try to delete the file:
const deleteFileById = (fileId) => {
  console.log(`File id is ${fileId}`);
  const drive = google.drive({ version: 'v3', authorization });
  return new Promise((resolve, reject) => {
    try {
      drive.files.delete({
        auth: authorization,
        fileId: fileId,
      }, (err, res) => {
        if (err) return console.log('The API returned an error: ' + err);
        resolve("File has been deleted");
      });
    } catch (error) {
      console.log(error);
    }
  });
}
And I get this response:
The API returned an error: Error: File not found: 1XYlwukNmzUrHRCh05pb9OeD1nnZdDjJU.
So, at this point, I would say that is weird...

This is the code I used. It works, though it sometimes runs a little fast. You don't need to add authorization to the call to the API; it's already part of the service.
async function deleteFiles(auth) {
  const driveService = google.drive({ version: 'v3', auth });
  let response = await driveService.files.list({
    q: "parents in '10krlloIS2i_2u_ewkdv3_1NqcpmWSL1w'",
    fields: 'files(id)',
  });
  const files = response.data.files;
  if (files.length) {
    console.log('Files:');
    files.map((file) => {
      console.log(`${file.id}`);
      driveService.files.delete({
        "fileId": file.id,
      })
        .then(() => console.log('FileDeleted'))
        .catch((err) => console.error('failed' + err));
    });
  } else {
    console.log('No files found.');
  }
}
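Because the deletions inside files.map() are fire-and-forget, deleteFiles can return before every delete has finished, which is likely why it "runs a little fast". Below is a hedged variant (a sketch, not part of the original answer; it reuses the same folder-ID placeholder, writes the query in the documented '<folderId>' in parents form, and deleteFilesAndWait is just an illustrative name) that waits for all deletions to settle:
const { google } = require('googleapis');

async function deleteFilesAndWait(auth) {
  const driveService = google.drive({ version: 'v3', auth });
  // List only the files inside the target folder.
  const { data } = await driveService.files.list({
    q: "'10krlloIS2i_2u_ewkdv3_1NqcpmWSL1w' in parents",
    fields: 'files(id, name)',
  });
  const files = data.files || [];
  if (!files.length) {
    console.log('No files found.');
    return;
  }
  // Promise.allSettled waits for every delete and keeps going if one fails.
  const results = await Promise.allSettled(
    files.map((file) => driveService.files.delete({ fileId: file.id })),
  );
  results.forEach((result, i) => {
    console.log(`${files[i].id}: ${result.status}`);
  });
}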

Related

Nodejs app not authenticating properly with Google Cloud Storage Bucket?

I'm building an app user profile photo component. I set up a Google Cloud Storage bucket without public access and with fine-grained ACLs, and a service account with the Storage Admin role. I downloaded the JSON key, placed it in the root directory alongside package.json, and referenced it in my file upload/download controller:
const processFile = require("../middleware/upload");
const { format } = require("util");
const { Storage } = require("#google-cloud/storage");
const storage = new Storage({ keyFilename: "my-service-account.json" });
const bucket = storage.bucket("my-gcp-storage-bucket");
const upload = async (req, res) => {
try {
await processFile(req, res);
if (!req.file) {
return res.status(400).send({ message: "Please upload a file!" });
}
const blob = bucket.file(req.file.originalname);
const blobStream = blob.createWriteStream({
resumable: false,
});
blobStream.on("error", (err) => {
res.status(500).send({ message: err.message });
});
blobStream.on("finish", async (data) => {
const publicUrl = format(
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
);
try {
await bucket.file(req.file.originalname).makePublic();
} catch {
return res.status(500).send({
message:
`Uploaded the file successfully: ${req.file.originalname}, but public access is denied!`,
url: publicUrl,
});
}
res.status(200).send({
message: "Uploaded the file successfully: " + req.file.originalname,
url: publicUrl,
});
});
blobStream.end(req.file.buffer);
} catch (err) {
console.log(err);
if (err.code == "LIMIT_FILE_SIZE") {
return res.status(500).send({
message: "File size cannot be larger than 2MB!",
});
}
res.status(500).send({
message: `Could not upload the file: ${req.file.originalname}. ${err}`,
});
}
};
const getListFiles = async (req, res) => {
try {
const [files] = await bucket.getFiles();
let fileInfos = [];
files.forEach((file) => {
fileInfos.push({
name: file.name,
url: file.metadata.mediaLink,
});
});
res.status(200).send(fileInfos);
} catch (err) {
console.log(err);
res.status(500).send({
message: "Unable to read list of files!",
});
}
};
const download = async (req, res) => {
try {
const [metaData] = await bucket.file(req.params.name).getMetadata();
res.redirect(metaData.mediaLink);
} catch (err) {
res.status(500).send({
message: "Could not download the file. " + err,
});
}
};
module.exports = {
upload,
getListFiles,
download,
};
I am able to upload into my bucket and list files - but I cannot download or see image previews in my React frontend. I get this message when trying to use the download API:
Anonymous caller does not have storage.objects.get access to the Google Cloud Storage object. Permission 'storage.objects.get' denied on resource (or it may not exist).
Specifically, these are the functions that do not work:
const getListFiles = async (req, res) => {
try {
const [files] = await bucket.getFiles();
let fileInfos = [];
files.forEach((file) => {
fileInfos.push({
name: file.name,
url: file.metadata.mediaLink,
});
});
res.status(200).send(fileInfos);
} catch (err) {
console.log(err);
res.status(500).send({
message: "Unable to read list of files!",
});
}
};
const download = async (req, res) => {
try {
const [metaData] = await bucket.file(req.params.name).getMetadata();
res.redirect(metaData.mediaLink);
} catch (err) {
res.status(500).send({
message: "Could not download the file. " + err,
});
}
};
I can't figure out what's wrong here, as I can upload and list files - just not download. Any ideas?
Make sure that the service account has the necessary permissions to download files. Refer to document1 and document2 for more information.
The authenticated user must have the storage.objects.get IAM permission to use this method. To return object ACLs, the authenticated user must also have the storage.objects.getIamPolicy permission.
Also, initialize the gcloud environment with gcloud init, then set up Application Default Credentials by running gcloud auth application-default login.
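Separately from IAM, note that metadata.mediaLink points at storage.googleapis.com and requires an authenticated request, so redirecting an anonymous browser to it produces exactly the "Anonymous caller does not have storage.objects.get access" error unless the object is publicly readable. A hedged alternative (a sketch, not the asker's code; it reuses the bucket and key-file names from the question) is to have the download handler return a time-limited V4 signed URL generated with the service account key:
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({ keyFilename: "my-service-account.json" });
const bucket = storage.bucket("my-gcp-storage-bucket");

const download = async (req, res) => {
  try {
    // Sign a read-only URL that expires after 15 minutes.
    const [url] = await bucket.file(req.params.name).getSignedUrl({
      version: "v4",
      action: "read",
      expires: Date.now() + 15 * 60 * 1000,
    });
    res.redirect(url);
  } catch (err) {
    res.status(500).send({ message: "Could not download the file. " + err });
  }
};
The same getSignedUrl call can replace the mediaLink URLs returned by getListFiles, so image previews work without making the objects public.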

How to download a spreadsheet from google drive using a service account and typescript/node.js [duplicate]

This question already has an answer here:
How to download dynamic files from google drive
(1 answer)
Closed 13 days ago.
I'm trying to download all the spreadsheets contained in a folder using a service account.
I cannot find a solution; I hope someone can help me.
I authenticate and drive.files.list succeeds, but then I can't download the files.
This is my code:
import { google } from "googleapis";
import { privatekey } from "./privatekey";
import { createWriteStream, writeFileSync } from "fs";
let jwtClient = new google.auth.JWT(privatekey.client_email, undefined, privatekey.private_key, [
"https://www.googleapis.com/auth/drive",
]);
//authenticate request
jwtClient.authorize(function (err, tokens) {
if (err) {
console.log(err);
return;
} else {
console.log("Successfully connected");
}
});
const folder_id = FOLDER_ID
let drive = google.drive("v3");
drive.files.list(
{
auth: jwtClient,
q: `'${folder_id}' in parents and trashed=false`,
},
function (err, response) {
if (err) {
console.log("The API returned an error: " + err);
return;
}
var files = response?.data.files;
if (files?.length == 0) return;
files?.forEach(async (file) => {
let fileId = file.id;
fileId == null ? (fileId = undefined) : (fileId = fileId);
//writeFileSync(`./cartella/${file.id}.xlsx`, "");
prova(jwtClient, fileId, file.mimeType);
//await getFileFromStream(jwtClient, fileId, file.mimeType);
});
}
);
function getFileFromStream(auth: any, fileId: any, mimeType: any) {
const destPath = `./cartella/${fileId}.xls`;
const dest = createWriteStream(destPath);
return new Promise(async (resolve, reject) => {
const drive = google.drive({ version: "v3", auth });
drive.files.get({
fileId: fileId,
alt: "media",
}),
(err: any, res: any): void => {
res.data
.on("end", () => {
console.log("Done");
})
.on("error", (error: any) => {
console.log("Error during download", error);
})
.pipe(dest);
};
});
}
function prova(auth: any, fileId: any, mimeType: any) {
const destPath = `./cartella/${fileId}.xls`;
const dest = createWriteStream(destPath);
const drive = google.drive({ version: "v3", auth });
drive.files.export({ fileId: fileId, mimeType: mimeType },{responseType: "stream"}, (err: any, res: any) => {
if (err) {
// handle error
console.log("error: ",err)
} else {
if (res == null) return
res.data
.on("end", function () {
console.log("Done");
})
.on("error", function (err: any) {
console.log("Error during download", err);
})
.pipe(dest);
}})
}
First of all, I added the service account to the editors of the folder in Google Drive.
The function getFileFromStream returns a big error, but I think the most interesting part is this:
domain: 'global',
reason: 'fileNotDownloadable',
message: 'Only files with binary content can be downloaded. Use Export with Docs Editors files.',
locationType: 'parameter',
location: 'alt' } ]
So I tried to use drive.files.export, but the response is
status: 400,
statusText: 'Bad Request',
request: {
responseURL: 'https://www.googleapis.com/drive/v3/files/file_id/export?mimeType=application%2Fvnd.google-apps.spreadsheet'
}
I also tried a different authentication method like the one proposed here:
Setting up Google Drive API on NodeJS using a service account
but it still doesn't work.
What am I doing wrong?
The following method will download files that are not Google Drive MIME types. You only need to use export when the file is a Google Drive MIME type and you need to convert it as it is downloaded, for example a Google Sheet converted to an Excel file, or a Google Docs file converted to a Microsoft Word file.
import io
import shutil

from googleapiclient.errors import HttpError
from googleapiclient.http import MediaIoBaseDownload


def download_file(service, file_id):
    try:
        # Call the Drive v3 API
        # Get the file name, so we can save it with the same name.
        file = service.files().get(fileId=file_id).execute()
        file_name = file.get("name")
        print(f'File name is: {file_name}')
        # Call the Drive v3 API
        # get the file media data
        request = service.files().get_media(fileId=file_id)
        fh = io.BytesIO()
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while done is False:
            status, done = downloader.next_chunk()
            print("Download %d%%" % int(status.progress() * 100))
        # The file has been downloaded into RAM, now save it to a file
        fh.seek(0)
        with open(file_name, 'wb') as f:
            shutil.copyfileobj(fh, f, length=131072)
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(f'An error occurred: {error}')
Solution found here:
How to download dynamic files from google drive
Thank you Tanaike for your kind help
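For the Node.js/TypeScript side of the question: the 400 from drive.files.export above is most likely because the file's own MIME type (application/vnd.google-apps.spreadsheet, visible in the request URL) was passed as the export MIME type, while export expects the target format to convert to. A hedged sketch (not part of the original question or answer; it reuses the jwtClient, passed in as auth, and the ./cartella directory from the question, and exportSheetAsXlsx is an illustrative name) that exports a Google Sheet as .xlsx:
const { google } = require("googleapis");
const { createWriteStream } = require("fs");

async function exportSheetAsXlsx(auth, fileId) {
  const drive = google.drive({ version: "v3", auth });
  const dest = createWriteStream(`./cartella/${fileId}.xlsx`);
  // Ask Drive to convert the Sheet into the Office spreadsheet format.
  const res = await drive.files.export(
    {
      fileId,
      mimeType: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    },
    { responseType: "stream" },
  );
  await new Promise((resolve, reject) => {
    res.data.on("error", reject).pipe(dest).on("finish", resolve);
  });
  console.log(`Exported ${fileId} to ./cartella/${fileId}.xlsx`);
}
Plain binary files in the same folder can still be fetched with drive.files.get and alt: 'media', which is what the Python answer above does with get_media.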

How to change access to a particular folder of google drive using node.js?

I'm learning about the Google Drive API for Node.js. I'm curious to know how I can change the access type of a particular file/folder of Google Drive in Node.js.
Let's suppose I've got a folder named Batman in Google Drive. Inside the Batman folder there are 3 files; one file's access type is public and the others' are private. So how can I iterate through the Batman folder to change the access type of the sub-files?
Code (it gets 10 file names from Google Drive):
const fs = require('fs');
const readline = require('readline');
const {google} = require('googleapis');
const SCOPES = ['https://www.googleapis.com/auth/drive'];
const TOKEN_PATH = 'token.json';
// Load client secrets from a local file.
fs.readFile('credentials.json', (err, content) => {
if (err) return console.log('Error loading client secret file:', err);
// Authorize a client with credentials, then call the Google Drive API.
authorize(JSON.parse(content), listFiles);
});
function authorize(credentials, callback) {
const {client_secret, client_id, redirect_uris} = credentials.installed;
const oAuth2Client = new google.auth.OAuth2(
client_id, client_secret, redirect_uris[0]);
// Check if we have previously stored a token.
fs.readFile(TOKEN_PATH, (err, token) => {
if (err) return getAccessToken(oAuth2Client, callback);
oAuth2Client.setCredentials(JSON.parse(token));
callback(oAuth2Client);
});
}
function getAccessToken(oAuth2Client, callback) {
const authUrl = oAuth2Client.generateAuthUrl({
access_type: 'offline',
scope: SCOPES,
});
console.log('Authorize this app by visiting this url:', authUrl);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
rl.question('Enter the code from that page here: ', (code) => {
rl.close();
oAuth2Client.getToken(code, (err, token) => {
if (err) return console.error('Error retrieving access token', err);
oAuth2Client.setCredentials(token);
// Store the token to disk for later program executions
fs.writeFile(TOKEN_PATH, JSON.stringify(token), (err) => {
if (err) return console.error(err);
console.log('Token stored to', TOKEN_PATH);
});
callback(oAuth2Client);
});
});
}
function listFiles(auth) {
const drive = google.drive({version: 'v3', auth});
drive.files.list({
pageSize: 10,
fields: 'nextPageToken, files(id, name)',
}, (err, res) => {
if (err) return console.log('The API returned an error: ' + err);
const files = res.data.files;
if (files.length) {
console.log('Files:');
files.map((file) => {
console.log(`${file.name} (${file.id})`);
});
} else {
console.log('No files found.');
}
console.log(files)
});
}
There are several factors you have to take into consideration to change the access type of a certain file/folder. Share files, folders and drives states all the details and parameters that can be changed. To do it programmatically, follow these steps:
1) List files:
List all the files in the specific folder you want to change using the q parameter. Unfortunately, there is a bug in google-api-nodejs-client in which the q parameter will not return the files. The bug is reported here, so you could comment or give it a thumbs up to let Google know you also have this issue. As a workaround, I recommend using axios.
const axios = require("axios");

const queryFoldersWithAxios = async () => {
  const params = {
    q: "'<YOUR_FOLDER_ID>' in parents",
    fields: "files(name,id)"
  };
  const headers = {
    "Authorization": "Bearer <YOUR_ACCESS_TOKEN>",
    "Content-Type": "application/json"
  };
  return await axios.get("https://www.googleapis.com/drive/v3/files", { params, headers });
}
Files: list
2) Create Permissions
You can create a new permission by just passing the file id.
const createPermissions = async (drive, fileId) => {
  // Return the Promise result after completing its task
  return new Promise((resolve, reject) => {
    const body = {
      "role": "reader",
      "type": "anyone"
    };
    return drive.permissions.create({
      fileId,
      resource: body
    }, (err, results) => err ? reject(err) : resolve(results));
  });
}
Permissions: create
3) List File permissions
You can verify all permissions in a file by listing its permissions and you can even get the ids of the permissions.
const listFilePermissions = async (drive, fileId) => {
  // Return the Promise result after completing its task
  return new Promise((resolve, reject) => {
    return drive.permissions.list({ fileId }, {
      "fields": 'permissions'
    }, (err, results) => err ? reject(err) : resolve(results));
  });
}
Permissions: list
Notice
Take the second step as an example of what you could do; please read Share files, folders and drives to learn all the possible combinations. Also, you can play with the Try this API tool to test the permissions before implementing them in code.
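To tie the steps back to the Batman example, here is a hedged end-to-end sketch (not part of the original answer; it calls the googleapis client directly, so it assumes the q-parameter issue mentioned in step 1 does not affect your client version, and makeFolderContentsPublic is an illustrative name) that lists the files in a folder and makes each one readable by anyone with the link:
const { google } = require('googleapis');

const makeFolderContentsPublic = async (auth, folderId) => {
  const drive = google.drive({ version: 'v3', auth });
  // Step 1: list the files inside the folder.
  const { data } = await drive.files.list({
    q: `'${folderId}' in parents and trashed = false`,
    fields: 'files(id, name)',
  });
  // Step 2: create an "anyone can read" permission on each file.
  for (const file of data.files || []) {
    await drive.permissions.create({
      fileId: file.id,
      requestBody: { role: 'reader', type: 'anyone' },
    });
    console.log(`Shared: ${file.name} (${file.id})`);
  }
};
Making a file private again means listing its permissions (step 3) and calling drive.permissions.delete with the fileId and the permissionId of the "anyone" entry.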

Downloading an image from Drive API v3 continuously gives corrupt images. How should I decode the response from the promise?

I'm trying to download images from a Google shared drive using the API v3. The download itself succeeds, but the image can't be viewed. Opening the image from the macOS Finder just results in a spinner.
I started using the example from the documentation (here: https://developers.google.com/drive/api/v3/manage-downloads):
const drive = google.drive({version: 'v3', auth});
// ....
var fileId = '0BwwA4oUTeiV1UVNwOHItT0xfa2M';
var dest = fs.createWriteStream('/tmp/photo.jpg');
drive.files.get({
fileId: fileId,
alt: 'media'
})
.on('end', function () {
console.log('Done');
})
.on('error', function (err) {
console.log('Error during download', err);
})
.pipe(dest);
however that fails because the .on() method doesn't exist. The exact error is "TypeError: drive.files.get(...).on is not a function".
The .get() method returns a promise. The response of the promise contains data that, depending on the config, is either a stream, a blob, or an arraybuffer. For all options, when I write the response data to a file, the file itself becomes unviewable and has the wrong size. The actual code (TypeScript, Node.js) for the arraybuffer example is below. Similar code for blob (with added name and modifiedDate) and for stream gives the same result.
const downloader = googleDrive.files.get({
fileId: file.id,
alt: 'media',
}, {
responseType: 'arraybuffer',
});
return downloader
.then((response) => {
const targetFile = file.id + '.' + file.extension;
fs.writeFileSync(targetFile, response.data);
return response.status;
})
.catch((response) => {
logger.error('Error in Google Drive service download: ' + response.message);
return response.message;
}
);
}
So the questions are:
what is the correct way to handle a download through the Google Drive API v3?
do I need to handle any formatting of the response data?
All help greatly appreciated!
Thanks
You want to download a file from Google Drive using googleapis with Node.js.
You have already been able to use Drive API.
If my understanding is correct, how about this answer?
Pattern 1:
In this pattern, arraybuffer is used for responseType.
Sample script:
const drive = google.drive({ version: "v3", auth });
var fileId = '###'; // Please set the file ID.
drive.files.get(
  {
    fileId: fileId,
    alt: "media"
  },
  { responseType: "arraybuffer" },
  function(err, { data }) {
    fs.writeFile("sample.jpg", Buffer.from(data), err => {
      if (err) console.log(err);
    });
  }
);
In this case, Buffer.from() is used.
Pattern 2:
In this pattern, stream is used for responseType.
Sample script:
const drive = google.drive({ version: "v3", auth });
var fileId = '###'; // Please set the file ID.
var dest = fs.createWriteStream("sample.jpg");
drive.files.get(
  {
    fileId: fileId,
    alt: "media"
  },
  { responseType: "stream" },
  function(err, { data }) {
    data
      .on("end", () => {
        console.log("Done");
      })
      .on("error", err => {
        console.log("Error during download", err);
      })
      .pipe(dest);
  }
);
Note:
If an error occurs, please use the latest version of googleapis.
From your question, it seems that you have already been able to retrieve the file you want to download using your request, while the file content cannot be opened. But if an error occurs, please try to add supportsAllDrives: true and/or supportsTeamDrives: true in the request.
References:
Download files
google-api-nodejs-client/samples/drive/download.js
If I misunderstood your question and this was not the direction you want, I apologize.
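To make the note above concrete, this is Pattern 2 with the shared-drive flag added (a sketch, not part of the original answer; the file ID is a placeholder, auth is assumed to be an already-authorized client, and downloadFromSharedDrive is an illustrative name):
const fs = require("fs");
const { google } = require("googleapis");

function downloadFromSharedDrive(auth) {
  const drive = google.drive({ version: "v3", auth });
  const dest = fs.createWriteStream("sample.jpg");
  drive.files.get(
    {
      fileId: "###", // Please set the file ID.
      alt: "media",
      supportsAllDrives: true, // needed for files that live on a shared drive
    },
    { responseType: "stream" },
    (err, res) => {
      if (err) return console.log("Error during download", err);
      res.data
        .on("end", () => console.log("Done"))
        .on("error", (e) => console.log("Error during download", e))
        .pipe(dest);
    }
  );
}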
Posting a third pattern for completeness using async/await and including teamdrive files.
import * as fs from "fs";
import { pipeline } from "stream/promises";
import { drive_v3 } from "googleapis";

type Drive = drive_v3.Drive;
type Schema$File = drive_v3.Schema$File;

async function downloadFile(drive: Drive, file: Schema$File, localDir: string = "/tmp/downloads") {
  if (!fs.existsSync(localDir)) {
    fs.mkdirSync(localDir);
  }
  const outputStream = fs.createWriteStream(`${localDir}/${file.name}`);
  const { data } = await drive.files.get({
    corpora: 'drive',
    includeItemsFromAllDrives: true,
    supportsAllDrives: true,
    fileId: file.id,
    alt: "media",
  }, {
    responseType: 'stream',
  });
  await pipeline(data, outputStream);
  console.log(`Downloaded file: ${localDir}/${file.name}`);
}
If someone is looking for a solution in 2023, here you go!
const fs = require('fs');
const path = require('path');

const downloadFile = async (file) => {
  const dirPath = path.join(process.cwd(), '/images');
  if (!fs.existsSync(dirPath)) {
    fs.mkdirSync(dirPath, { recursive: true });
  }
  const filePath = `${dirPath}/${file.name}.jpg`;
  const destinationStream = fs.createWriteStream(filePath);
  try {
    // getService() is the author's own helper (not shown) that returns an
    // authorized Drive v3 client.
    const service = await getService();
    const { data } = await service.files.get(
      { fileId: file.id, alt: 'media' },
      { responseType: 'stream' }
    );
    return new Promise((resolve, reject) => {
      data
        .on('end', () => {
          console.log('Done downloading file.');
          resolve(filePath);
        })
        .on('error', (err) => {
          console.error('Error downloading file.');
          reject(err);
        })
        .pipe(destinationStream);
    });
  } catch (error) {
    throw error;
  }
};

Promise only resolves on the first request

I am developing an API to create a warehouse structure. Because we are using a microservice architecture, I need to make a request via RabbitMQ to another microservice to generate the address for the new warehouse.
Therefore I use the amqp consume function wrapped in a function which returns a promise. When I hit the endpoint the first time, the promise gets resolved and I can continue with my data. But on the second request, the promise does not get resolved.
Maybe it's something obvious, but at the moment I don't get it.
So here is my code:
routes.js
router.post('/', (req, res) => {
  ...
  const validate = ajv.compile(warehoseSchema);
  const valid = validate(req.body);
  if (valid) {
    sendToQueue('addressMgmt', req.body.address);
    consume()
      .then((result) => {
        const {
          id_address: idAddress,
          license_plate: licensePlate,
          town,
        } = result.body;
        createWarehouseHandler(
          customernumber, req.body, idAddress, licensePlate, town,
        )
          .then((insertId) => {
            res.json({
              id: 'warehouses02',
              message: `Warehouse with id ${insertId} successfully created`,
            });
          })
          .catch((err) => {
            res.status(err.status).json({
              id: err.id,
              message: err.message || err.sqlMessage,
            });
          });
      })
      .catch((err) => {
        res.status(err.status).json({
          id: err.id,
          message: err.message || err.sqlMessage,
        });
      });
  } else {
    res.status(417).json({
      id: 'warehouses01',
      message: `Invalid JSON: ${ajv.errorsText(validate.errors)}`,
    });
  }
});

const consume = () => new Promise((resolve, reject) => {
  const q = 'warehouseMgmt';
  amqpCh.consume(q, (msg) => {
    const message = JSON.parse(msg.content.toString());
    if (Object.keys(message).includes('body')) {
      resolve(message);
    } else {
      const err = new Error();
      err.status = 500;
      err.id = 'rabbit01';
      err.message = 'No message was cosumed';
      reject(err);
    }
  }, { noAck: true });
});
On the first request consume().then() gets called but on the second and following requests, it doesn't.
Thanks for your help
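The likely cause is that consume() calls amqpCh.consume on every request, registering an additional consumer on the same queue each time; RabbitMQ round-robins deliveries between consumers, so the reply to the second request tends to land on the first consumer, whose promise has already settled and can never resolve again. A hedged sketch of the usual fix (not from the original post; it uses amqplib's promise API, assumes the channel is created at startup, and initRpc/rpcRequest are illustrative names): register a single long-lived consumer on a reply queue and match replies to requests by correlationId.
const { randomUUID } = require('crypto');

const pending = new Map(); // correlationId -> resolve callback

async function initRpc(channel) {
  // Register one consumer once, at startup, instead of once per HTTP request.
  const { queue } = await channel.assertQueue('', { exclusive: true });
  await channel.consume(queue, (msg) => {
    const resolvePending = pending.get(msg.properties.correlationId);
    if (!resolvePending) return;
    pending.delete(msg.properties.correlationId);
    resolvePending(JSON.parse(msg.content.toString()));
  }, { noAck: true });
  return queue;
}

function rpcRequest(channel, replyQueue, targetQueue, payload) {
  return new Promise((resolve) => {
    const correlationId = randomUUID();
    pending.set(correlationId, resolve);
    channel.sendToQueue(targetQueue, Buffer.from(JSON.stringify(payload)), {
      correlationId,
      replyTo: replyQueue,
    });
  });
}
The route handler would then await rpcRequest(...) instead of calling sendToQueue and consume() separately, and the address microservice would publish its reply to msg.properties.replyTo with the same correlationId.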
