Google Drive API: downloading a file in Node.js

I'm trying to get the contents of a file using the Google Drive API v3 in Node.js.
I read in this documentation that I get a stream back from drive.files.get({fileId, alt: 'media'}), but that isn't the case. I get a promise back.
https://developers.google.com/drive/api/v3/manage-downloads
Can someone tell me how I can get a stream from that method?

I believe your goal and situation are as follows.
You want to retrieve the stream type from the method of drive.files.get.
You want to achieve this using googleapis with Node.js.
You have already done the authorization process for using the Drive API.
For this, how about this answer? In this case, please use responseType. Ref
Pattern 1:
In this pattern, the file is downloaded as the stream type and saved as a file.
Sample script:
const fs = require("fs");

const dest = fs.createWriteStream("###"); // Please set the filename of the saved file.
drive.files.get(
  { fileId: id, alt: "media" },
  { responseType: "stream" },
  (err, res) => {
    if (err) {
      // Check the error before touching the response, which may be undefined here.
      console.log(err);
      return;
    }
    res.data
      .on("end", () => console.log("Done."))
      .on("error", (err) => {
        console.log(err);
        return process.exit();
      })
      .pipe(dest);
  }
);
Pattern 2:
In this pattern, the file is downloaded as the stream type and put into a buffer.
Sample script:
drive.files.get(
  { fileId: id, alt: "media" },
  { responseType: "stream" },
  (err, res) => {
    if (err) {
      console.log(err);
      return;
    }
    const buf = [];
    res.data.on("data", (e) => buf.push(e));
    res.data.on("end", () => {
      const buffer = Buffer.concat(buf);
      console.log(buffer);
    });
  }
);
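Both patterns use the callback form. Because drive.files.get also returns a promise when no callback is passed, the same stream can be retrieved with async/await. A minimal sketch, assuming the same authorized drive client and the id and dest values from Pattern 1:

// Hypothetical async/await variant: with { responseType: "stream" },
// the resolved response exposes the download stream as res.data.
async function download() {
  const res = await drive.files.get(
    { fileId: id, alt: "media" },
    { responseType: "stream" }
  );
  res.data.pipe(dest);
}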
Reference:
Google APIs Node.js Client

Related

my videos on google bucket can not fast forward or rewind

So I built an e-learning platform with Node.js and Vue.js, and I am using GCP buckets to store my videos privately. Everything works perfectly aside from the fact that my videos cannot fast forward or rewind: if you try moving the video to a specific position (maybe towards the end of the video), it returns to the same spot where you were initially. At first I thought it was a Vue problem, but I tried playing these videos from my GCP bucket dashboard directly and it does the same thing. It only works fine when I use the Firefox browser.
I am using the "Uniform: No object-level ACLs enabled" access control and the "Not public" permission settings. I am new to GCP, and I have no idea what the problem could be.
Here is the Node.js function I am using:
const upload = async (req, res) => {
  try {
    if (!req.file) {
      res.status(400).send('No file uploaded.');
      return;
    }
    const gcsFileName = `${Date.now()}-${req.file.originalname}`;
    var reader = fs.createReadStream('uploads/' + req.file.originalname);
    reader.pipe(
      bucket.file(gcsFileName).createWriteStream({ resumable: false, gzip: true })
        .on('finish', () => {
          // The public URL can be used to directly access the file via HTTP.
          const publicUrl = format(
            `https://storage.googleapis.com/bucketname/` + gcsFileName
          );
          // console.log('https://storage.googleapis.com/faslearn_files/' + gcsFileName)
          fs.unlink('uploads/' + req.file.originalname, (err) => {
            if (err) {
              console.log("failed to delete local image:" + err);
            } else {
              console.log('successfully deleted local image');
            }
          });
          res.status(200).send(publicUrl);
        })
        .on('error', err => {
          console.log(err);
          return;
        })
      //.end(req.file.buffer)
    );
    // Read and display the file data on console
    reader.on('data', function (chunk) {
      console.log('seen chunk');
    });
  } catch (err) {
    console.log(" some where");
    res.status(500).send({
      message: `Could not upload the file: ${req.file.originalname}. ${err}`,
    });
  }
};
The issue was coming from the way I encoded the video; I was supposed to use the blob, but I used the pipe.
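Seeking in the browser depends on HTTP Range requests, and one plausible culprit in the upload code above is gzip: true: Cloud Storage serves gzip-transcoded objects via decompressive transcoding, which does not support Range requests, so players cannot jump to an arbitrary byte offset. A minimal sketch of the write stream without compression, assuming the same bucket and reader as above and an MP4 input (the content type is an assumption):

// Hypothetical variant of the write stream above: storing the object
// uncompressed keeps it byte-addressable, so Range requests (seeking) work.
const writeStream = bucket.file(gcsFileName).createWriteStream({
  resumable: false,
  metadata: { contentType: 'video/mp4' }, // assumed input type
});
reader.pipe(writeStream);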

Unable to send photo using Form Submit

I've a photo and a paid upload service : http://example.com/in.php .
I'm not able to upload a given JPEG file using this code. It tells me that an invalid file format was uploaded, but using the Linux file command I can see it is JPEG format. Is there any problem in this code?
fs.readFile('/tmp/photo.jpg', 'utf8', function (err, contents) {
  var b64 = new Buffer(contents);
  var s = b64.toString('base64');
  var request = require('request');
  request.post('http://example.com/in.php', {
    form: {
      method: 'base64',
      key: 'cplbhvnmvdn4bjxxchzgqyjz7rf9fy8w',
      body: s,
    }
  }, function (err, res, body) {
    console.log("body=", body);
    console.log("res=", res);
  });
});
I see no mistakes in your process of converting the JPEG to base64, but I would suggest a workaround: use a small image-to-base64 node package.
Also, there's a higher chance of something being wrong in the POST request: maybe the upload service API is not accepting the base64 format, or maybe the API key is not authorized. Please read their API docs carefully and do as they say, and also console.log the error code you are receiving.
After all that, try something like this with Axios.
const axios = require('axios');
const image2base64 = require('image-to-base64');

image2base64("path/to/file.jpg") // you can also use a URL
  .then((base64) => {
    console.log(base64); // cGF0aC90by9maWxlLmpwZw==
    axios.post('http://example.com/in.php', base64, {
      headers: {
        "Content-Type": "multipart/form-data"
      },
    }).then((response) => {
      console.log(response);
    }).catch(function (error) {
      console.log("Error in uploading.");
    });
  })
  .catch((error) => {
    console.log(error); // Exception error...
  });
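One detail worth checking in the original snippet: fs.readFile is called with the 'utf8' encoding, which decodes the JPEG bytes as text and corrupts them before they ever reach Buffer. Omitting the encoding yields a raw Buffer that can be base64-encoded losslessly. A minimal sketch, reusing the question's endpoint, key, and form fields:

// Read the JPEG as raw bytes (no encoding), then base64-encode it.
const fs = require('fs');
const request = require('request');

fs.readFile('/tmp/photo.jpg', function (err, contents) {
  if (err) return console.log(err);
  request.post('http://example.com/in.php', {
    form: {
      method: 'base64',
      key: 'cplbhvnmvdn4bjxxchzgqyjz7rf9fy8w',
      body: contents.toString('base64'),
    }
  }, function (err, res, body) {
    console.log("body=", body);
  });
});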

Get file buffer using Google Drive API and async/await

I'm trying to get the buffer of some Drive PDF files so I can parse them and use the data.
I've managed to get the file names and ids using async/await and a "drive.files.list" wrapped in a promise. Now I need to use the file ids to get the buffer and then read it.
The function I need should return a promise that I can await to get a buffer. (My parser works fine when I get PDF buffers from website responses.)
function getBuffer(drive, file) {
  return new Promise((resolve, reject) => {
    ///// Google Auth
    var jwToken = new google.auth.JWT(
      key.client_email,
      null,
      key.private_key,
      ["https://www.googleapis.com/auth/drive"],
      null
    );
    jwToken.authorize((authErr) => {
      if (authErr) {
        return reject([false, "Auth Error: " + authErr]);
      }
    });
    drive.files.get({
      auth: jwToken,
      fileId: file.id,
      alt: 'media',
      supportsAllDrives: true
    }, function (err, res) {
      if (err) {
        return reject('The API returned an error: ' + err);
      }
      console.log(res);
      const buffer = res;
      resolve(buffer);
    });
  });
}
And I use it this way:
var buffer = await getBuffer(drive,files[i]);
The output I get in "console.log(res)" is something like this:
...
��M�7�|�ı�[��Ξ�A����EBS]��P��r�����j�3�|�I.��i�+ϢKU���U�:[�═�,^߻t덲�v��=}'*8���ѻ��#ғ�s��No��-��q8E9�/f� �(�`�j'3
"╚�-��� ������[jp&��╚k��M��vy� In�:a�զ�OlN��u����6�n���q�/Y�i4�?&%��q�,��p╚.ZV&n�Ɨ��2G������X����Y
D],�ggb�&�N���G����NS�Lח\U�^R|_f<��f*�|��]�{�3�-P�~�CS��t��>g�Y��#�#7Wjۋ╗=�5�����#ջ���5]>}&v�╝═�wg��eV�^>�#�{��Ѿ��ޤ��>O�� z�?{8Ij�0╗B�.�Cjm�4������║��m�,╗�������O���fS��ӂcE��g�3(�G��}d^O������7����|�
H�N��;
{��x�bȠ�׮�i]=���~��=��ٟ<��C��
wi��'a�-��p═M�6o��ϴ��ve��+��'
...
And when I try to use the parser (pdf2json) I get this error:
"An error occurred while parsing the PDF: stream must have data"
Thanks in advance :D
You want to download a file from Google Drive.
You want to convert the downloaded data to the buffer.
You have already been able to download files from Google Drive using googleapis with Node.js.
If my understanding is correct, how about this modification? In this modification, the file is downloaded as the stream type and the data is converted to the buffer.
Modified script:
From:
drive.files.get({
  auth: jwToken,
  fileId: file.id,
  alt: 'media',
  supportsAllDrives: true
}, function (err, res) {
  if (err) {
    return reject('The API returned an error: ' + err);
  }
  console.log(res);
  const buffer = res;
  resolve(buffer);
});
To:
drive.files.get(
  {
    auth: jwToken,
    fileId: file.id,
    alt: "media",
    supportsAllDrives: true
  },
  { responseType: "stream" },
  function (err, res) {
    if (err) {
      return reject("The API returned an error: " + err);
    }
    let buf = [];
    res.data.on("data", function (e) {
      buf.push(e);
    });
    res.data.on("end", function () {
      const buffer = Buffer.concat(buf);
      console.log(buffer);
      // fs.writeFile("filename", buffer, err => console.log(err)); // For testing
      resolve(buffer);
    });
  }
);
Note:
As a test case, I could confirm that when buffer is saved to a file using fs.writeFile("filename", buffer, err => console.log(err));, the downloaded file can be created.
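Since the calling code already uses await, the callback can also be skipped entirely. A minimal sketch, assuming jwToken has already been authorized: gaxios (which googleapis uses under the hood) also accepts responseType: "arraybuffer", so the whole body arrives at once and no manual stream handling is needed.

// Hypothetical async/await variant of getBuffer using an arraybuffer response.
async function getBuffer(drive, file) {
  const res = await drive.files.get(
    { auth: jwToken, fileId: file.id, alt: "media", supportsAllDrives: true },
    { responseType: "arraybuffer" }
  );
  return Buffer.from(res.data); // res.data is an ArrayBuffer
}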
Reference:
google-api-nodejs-client
If I misunderstood your question and this was not the direction you want, I apologize.

Catch superagent request error before piping

I'm trying to pipe a file from service A through service B into my Postman client. Service A builds and delivers a CSV file, and service B (Node.js) has to pipe it into my client.
After researching a lot, I have managed to successfully pipe the files into service B and then into Postman. Now I want to handle the ugly cases: what if the request token is invalid? What if I can't find the file?
As of this moment, I have found zero documentation or examples on how to successfully handle errors while piping a request using superagent.
This is what I have so far:
router.post("/csv", (req, res) => {
  download_csv(req.get("Authorization"), req.body.ids)
    .then((response) => {
      res.sendFile(path.resolve(response));
    })
    .catch((err) => {
      res.status(err.status).json(err.response.body);
    });
});

function download_csv(token, ids) {
  const stream = fs.createWriteStream("filters.csv");
  let request = agent
    .post(`${profiles}/api/documents/csv`)
    .set("authorization", token)
    .send({
      ids: ids,
      action: DOWNLOAD_CSV_PROFILES
    });
  request.on("response", res => {
    // Maybe I can use abort to handle this thing, but can't figure out how!
    // if (res.status !== 200) request.abort()
    console.log(res.status);
  });
  request.on("abort", () => {
    console.log("aborted");
    return new Promise((resolve, reject) => {
      resolve("request aborted");
    });
  });
  request.pipe(stream);
  return streamToPromise(stream);
}

function streamToPromise(stream) {
  return new Promise((resolve, reject) => {
    stream.on("error", (err) => {
      console.log("error in error");
    });
    stream.on("finish", () => {
      console.log("File saved");
      resolve(stream.path);
    });
  });
}
This code handles the creation of the files correctly. When I fake the token or misspell the Authorization header, I get a correct 401 response, but a file gets written anyway, with its contents being the authentication error.
Can anyone give me a hint on how to:
actually catch and manage the request when it fails
in such a case, escape the piping by going back to the Express context and just returning a failed Express request?
Many thanks!
If I understand you correctly, simply create the fs write stream in on('response') and make a small fix on the resolution.
function download_csv(token, ids) {
  return new Promise((resolve, reject) => {
    let request = agent
      .post(`${profiles}/api/documents/csv`)
      .set("authorization", token)
      .send({
        ids: ids,
        action: DOWNLOAD_CSV_PROFILES
      });
    request.on("response", res => {
      if (res.status === 200) {
        res
          .on("end", resolve)
          .pipe(fs.createWriteStream("filters.csv"));
      } else {
        reject();
      }
    });
    request.on("abort", reject);
  });
}
I'm not sure what the "request" you're using is, but assuming it's actually the request npm module, that will help.
Ideally, upload the file to a temporary directory and move it when the promise is resolved, deleting it when rejected; this solves the issue of partial downloads, as in the sketch below.
If you want to make any on-the-fly transforms, check out my "scramjet"; it'll make everything easier with promises.
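A minimal sketch of that temporary-file idea, wrapped around the download above; download_csv_to is a hypothetical variant of download_csv that takes a destination path, and finalPath is a placeholder:

const fs = require("fs");
const os = require("os");
const path = require("path");

// Stream into a temp file first; move it into place only when the download
// finished cleanly, and clean up the partial file on failure.
async function download_csv_safe(token, ids, finalPath) {
  const tmpPath = path.join(os.tmpdir(), `filters-${Date.now()}.csv`);
  try {
    await download_csv_to(token, ids, tmpPath); // hypothetical: writes to tmpPath
    fs.renameSync(tmpPath, finalPath);
    return finalPath;
  } catch (err) {
    fs.unlink(tmpPath, () => {}); // best-effort cleanup
    throw err;
  }
}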

Node.js Streaming/Piping Error Handling (Change Response Status on Error)

I have millions of rows in my Cassandra db that I want to stream to the client in a zip file (I don't want a potentially huge zip file in memory). I am using the stream() function from the Cassandra-Node driver, piping to a Transform stream which extracts the one field from each row that I care about and appends a newline, and piping that to archiver, which pipes to the Express Response object. This seems to work fine, but I can't figure out how to properly handle errors during streaming.

I have to set the appropriate headers/status before streaming for the client, but if there is an error during the streaming, on the dbStream for example, I want to clean up all of the pipes and reset the response status to something like 404. But if I try to reset the status after the headers are set and the streaming starts, I get "Can't set headers after they are sent". I've looked all over and can't find how to properly handle errors in Node when piping/streaming to the Response object. How can the client tell if valid data was actually streamed if I can't send a proper response code on error? Can anyone help?
function streamNamesToWriteStream(query, res, options) {
  return new Promise((resolve, reject) => {
    let success = true;
    const dbStream = db.client.stream(query);
    const rowTransformer = new Transform({
      objectMode: true,
      transform(row, encoding, callback) {
        try {
          const vote = row.name + '\n';
          callback(null, vote);
        } catch (err) {
          callback(null, err.message + '\n');
        }
      }
    });
    // Handle res events
    res.on('error', (err) => {
      logger.error(`res ${res} error`);
      return reject(err);
    });
    dbStream.on('error', function (err) {
      res.status(404).send(); // Can't set headers after they are sent.
      logger.debug(`dbStream error: ${err}`);
      success = false;
      //res.end();
      //return reject(err);
    });
    res.writeHead(200, {
      'Content-Type': 'application/zip',
      'Content-disposition': 'attachment; filename=myFile.zip'
    });
    const archive = archiver.create('zip');
    archive.on('error', function (err) { throw err; });
    archive.on('end', function (err) {
      logger.debug(`Archive done`);
      //res.status(404).end()
    });
    archive.pipe(res, {
      //end:false
    });
    archive.append(dbStream.pipe(rowTransformer), { name: 'file1.txt' });
    archive.finalize();
  });
}
Obviously it's too late to change the headers, so there's going to have to be application logic to detect a problem. Here are some ideas I have:
Write an unambiguous sentinel of some kind at the end of the stream when an error occurs. The consumer of the zip file will then need to look for that value to check for a problem (see the sketch after this list).
Perhaps more simply, have the consumer execute a verification on the integrity of the zip archive. Presumably if the stream fails the zip will be corrupted.
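A rough sketch of the sentinel idea, adapted to the archiver code above. It assumes archive.finalize() is deferred until the source stream settles (rather than being called unconditionally as in the question), and the ERROR.txt entry name is just an assumed convention for the client to check:

// Defer finalize() so a sentinel entry can still be appended on error.
archive.append(dbStream.pipe(rowTransformer), { name: 'file1.txt' });

dbStream.on('end', () => archive.finalize());
dbStream.on('error', (err) => {
  logger.debug(`dbStream error: ${err}`);
  rowTransformer.end(); // close the data entry so archiver can move on
  archive.append(`ERROR: ${err.message}\n`, { name: 'ERROR.txt' }); // sentinel entry
  archive.finalize();
});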
