Specifying projectId when creating GCP Storage bucket - node.js

I am trying to create a GCP Storage Bucket using the Node.js library.
I've been using the steps here:
https://cloud.google.com/storage/docs/creating-buckets#storage-create-bucket-nodejs
And code pasted below.
The challenge is that my bucket keeps being created in the wrong project. My project is set in the gcloud CLI, in my Node environment, and in my script.
Is there some way to set the project in the values you pass to the library's createBucket function?
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const storageClass = 'Name of a storage class, e.g. coldline';
// const location = 'Name of a location, e.g. ASIA';

// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');

// Creates a client
const storage = new Storage();

async function createBucketWithStorageClassAndLocation() {
  // For default values see: https://cloud.google.com/storage/docs/locations and
  // https://cloud.google.com/storage/docs/storage-classes
  const [bucket] = await storage.createBucket(bucketName, {
    location,
    [storageClass]: true,
  });

  console.log(
    `${bucket.name} created with ${storageClass} class in ${location}.`
  );
}

createBucketWithStorageClassAndLocation();

You can specify projectId when you initialize the Storage class:

const storage = new Storage({
  projectId: 'my-project-id',
  keyFilename: '/path/to/keyfile.json'
});
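For example, a minimal sketch that ties this to the snippet above (the project ID, bucket name, location, and storage class are placeholders):

const {Storage} = require('@google-cloud/storage');

// Buckets created by this client land in this project, regardless of the
// gcloud CLI or environment defaults.
const storage = new Storage({projectId: 'my-project-id'});

async function createBucketInProject() {
  const [bucket] = await storage.createBucket('my-bucket', {
    location: 'ASIA',
    coldline: true, // shorthand for the COLDLINE storage class
  });
  console.log(`${bucket.name} created in project my-project-id.`);
}

createBucketInProject();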

Related

Google Cloud Function - read the CONTENT of a new file created in a bucket using NodeJS

In GCP (not Firebase) I have a bucket and a function which is called when a new file is created in the bucket. Works great.
/**
 * Triggered from a change to a Cloud Storage bucket.
 */
exports.mycompany_upload_file = (event, context) => {
  const gcsEvent = event;
  const filename = gcsEvent.name;
  const bucketName = event.bucket;

  console.log(`=====================================================`);
  console.log(`Event Type: ${context.eventType}`);
  console.log(`Bucket: ${bucketName}`);
  console.log(`File: ${filename}`);
  console.log(`=====================================================`);

  // now I would like to open that file and read it line-by-line
};
How do I address that file? What's the path of that file?
Can I use standard Node libraries like 'fs'?
I found this post that might help you, but basically the answer is as follows:
Note that it will store the result in a ramdisk, so you'll need enough RAM available to your function to download the file.
const {Storage} = require('@google-cloud/storage');

const storage = new Storage({projectId: "<your_project>"});
const bucket = storage.bucket("<your_bucket>");
const file = bucket.file("<path/to/your_file>");

exports.gcstest = (event, callback) => {
  // Download the object to the function's writable /tmp ramdisk.
  file.download({destination: "/tmp/test"}, function(err, file) {
    if (err) {
      console.log(err);
    } else {
      callback();
    }
  });
};
For more information you can check the documentation for Google Cloud Storage: Node.js Client
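To then read the downloaded copy line-by-line (which also answers the 'fs' part of the question), standard Node libraries work on /tmp like on any local file; a minimal sketch with fs and readline, assuming the /tmp/test destination used above:

const fs = require('fs');
const readline = require('readline');

async function readLineByLine(localPath) {
  const rl = readline.createInterface({
    input: fs.createReadStream(localPath),
    crlfDelay: Infinity, // treat \r\n as a single line break
  });
  for await (const line of rl) {
    console.log(`Line: ${line}`);
  }
}

readLineByLine('/tmp/test');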

Firebase function: Error: unable to open for write

So I was trying to implement a Firebase function. I went to the Firebase functions examples repository and copied one. Everything deploys properly ("Deploy complete!") with no sign of an error. However, when I try to upload an image to Firebase Storage, the function can't open it.
There is a code that I used:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const {Storage} = require('@google-cloud/storage');
const gcs = new Storage();
const path = require('path');
const os = require('os');
const fs = require('fs');
const sharp = require('sharp');

exports.generateThumbnail = functions.storage.object().onFinalize(async (object) => {
  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const contentType = object.contentType; // File content type.
  const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.

  if (!contentType.startsWith('image/')) {
    return console.log('This is not an image.');
  }

  const fileName = path.basename(filePath);
  if (fileName.startsWith('thumb_')) {
    return console.log('Already a Thumbnail.');
  }

  const bucket = admin.storage().bucket(fileBucket);
  const tempFilePath = path.join(os.tmpdir(), fileName);
  console.log('Created temporary path', tempFilePath);
  const metadata = {
    contentType: contentType,
  };

  await bucket.file(filePath).download({destination: tempFilePath});
  console.log('Image downloaded locally to', tempFilePath);

  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
  console.log('Created thumb path', thumbFilePath);

  const size = 200;
  /*await*/ sharp(tempFilePath).resize(size, size).toFile(thumbFilePath);

  await bucket.upload(tempFilePath, {
    destination: filePath,
    metadata: metadata,
  });
  return fs.unlinkSync(tempFilePath);
});
Error:
unable to open for write

Cloud Functions has a read-only filesystem except for the /tmp directory. You have to make sure you are writing your data to a path under /tmp, e.g. /tmp/your-file.
The only writeable part of the filesystem is the /tmp directory, which
you can use to store temporary files in a function instance. This is a
local disk mount point known as a "tmpfs" volume in which data written
to the volume is stored in memory. Note that it will consume memory
resources provisioned for the function.
Cloud Functions Execution Environment
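Applied to the question's code, a sketch of the adjusted thumbnail step (the variable names come from the question's function; only the write path changes):

// path, os, sharp, bucket, filePath, fileName, tempFilePath, metadata and
// size are the ones defined in the question's function.
const thumbFileName = `thumb_${fileName}`;

// Write the thumbnail into /tmp, the only writable directory...
const tempThumbPath = path.join(os.tmpdir(), thumbFileName);
await sharp(tempFilePath).resize(size, size).toFile(tempThumbPath);

// ...then upload it next to the original object in the bucket.
await bucket.upload(tempThumbPath, {
  destination: path.join(path.dirname(filePath), thumbFileName),
  metadata: metadata,
});

fs.unlinkSync(tempThumbPath);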
I guess this will also work in Firebase (please comment if not): gcsfs.
If you put gcsfs in the "requirements.txt" and import gcsfs in the Python code, you can use the module like this (example adapted from the links below). Have a look at this example for saving a CSV:

fs = gcsfs.GCSFileSystem(project=MY_PROJECT)
fs.ls(BUCKET_NAME)

# or choose 'w' here:
with fs.open(filepath, 'wb') as outcsv:
    ...
Further links:
How to open a file from google cloud storage into a cloud function
https://gcsfs.readthedocs.io/en/latest/index.html#examples

google cloud storage node client - credentials not working as expected

I'm working on a project where I need to access some objects in a gcloud bucket
It works fine with the GOOGLE_APPLICATION_CREDENTIALS env variable, but not when I try using the keyFilename option of the Storage constructor. Here is a minimal example that shows what is going on:
const {Storage, File} = require('@google-cloud/storage');
const path = require("path");

// Fake bucket and file names
const bucketname = "my-bucket";
const filename = "test-file.txt";

// Points to a valid key file for a service account with read & write access to the bucket
const keyPath = path.resolve("./key.json");

const storage = new Storage({keyFile: keyPath}); // note that I'm providing a keyFile here
const bucket = storage.bucket(bucketname);
const file = new File(bucket, filename);

async function main() {
  console.log("try without GOOGLE_APPLICATION_CREDENTIALS");
  try {
    await file.download({ destination: "./test.txt" });
    console.log("ok");
    return;
  } catch (e) {
    console.log(e.message);
  }

  console.log("try with GOOGLE_APPLICATION_CREDENTIALS");
  try {
    process.env["GOOGLE_APPLICATION_CREDENTIALS"] = keyPath;
    await file.download({ destination: "./test.txt" });
    console.log("ok");
    return;
  } catch (e) {
    console.log("failed with GOOGLE_APPLICATION_CREDENTIALS");
    console.log(e.message);
  }
}

main();
I would expect the output for this to be:
try without GOOGLE_APPLICATION_CREDENTIALS
ok
but instead I get
try without GOOGLE_APPLICATION_CREDENTIALS
Anonymous caller does not have storage.objects.get access to my-bucket/test-file.txt.
try with GOOGLE_APPLICATION_CREDENTIALS
ok
What is going on here?
Well, this is dumb, but it might help somebody:
The Storage constructor options object has both a keyFile and a keyFilename option, with the exact same description (to be precise, StorageOptions inherits both from GoogleAuthOptions).
The documentation only mentions the keyFilename option, but my editor was also showing keyFile as an autocomplete option, and I used it without thinking too much about it.
keyFilename is the one you want to use, not keyFile (what is that one for? No idea).
In short:
new Storage({keyFilename: "/path/to/key.json"}); // this works
new Storage({keyFile: "/path/to/key.json"}); // this doesn't
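Applied to the question's minimal example (same fake bucket and file names), the first try should then print "ok":

const {Storage} = require('@google-cloud/storage');
const path = require('path');

// keyFilename (not keyFile) makes the client authenticate with the key file.
const storage = new Storage({keyFilename: path.resolve('./key.json')});
const file = storage.bucket('my-bucket').file('test-file.txt');

file.download({destination: './test.txt'})
  .then(() => console.log('ok'))
  .catch((e) => console.log(e.message));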

Uploading a file to Firebase Storage using firebase-admin in Node

I need to upload a file from a Node script to my Firebase storage using firebase-admin.
This is how I'm doing it in the browser (IT WORKS):

// THIS IS INSIDE A FUNCTION
const storageRef = firebase.storage().ref('my-images/' + fileName);
const uploadTask = storageRef.put(file, metadata);
This is how I'm TRYING to do it in the Node script:
const admin = require('firebase-admin');
admin.initializeApp();
// THIS IS INSIDE A FUNCTION
const storageRef = admin.storage().ref('my-images/' + fileName);
const uploadTask = storageRef.put(file,metadata);
And I'm getting the error:
TypeError: admin.storage(...).ref is not a function
I also tried:
admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  storageBucket: "MY_BUCKET.appspot.com"
});
// THIS IS INSIDE A FUNCTION
const storageRef = admin.storage().bucket().ref('my-images/' + fileName);
const uploadTask = storageRef.put(file,metadata);
And I'm getting the error:
TypeError: admin.storage(...).bucket(...).ref is not a function
QUESTION
I need to save the file in my default bucket inside the my-images folder. What is the proper way of doing it?
With the Google Cloud Storage Node.js Client you need a different approach compared to the JavaScript SDK.
More precisely, you need to use the upload() method of a Bucket, as follows:
const bucket = admin.storage().bucket();

// upload() takes the path of a local file (localFilePath here is a
// placeholder); the destination option controls where the object is stored
// in the bucket. Without it, the object name defaults to the local file's
// basename, so it would not land in the my-images folder.
bucket.upload(localFilePath, {destination: 'my-images/' + fileName}).then(function(data) {
  const file = data[0];
});
You will find more examples in the documentation (link above).
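If the file's content is in memory (closer to the browser's put(file, metadata)) rather than on disk, a hedged alternative is the save() method of a File; a minimal sketch, where fileBuffer, fileName, and the bucket name are placeholders:

const admin = require('firebase-admin');

admin.initializeApp({
  storageBucket: 'MY_BUCKET.appspot.com',
});

// fileBuffer: a Buffer (or string) with the file's contents; fileName and
// metadata mirror the question's variables. All are placeholders here.
async function uploadFromMemory(fileBuffer, fileName, metadata) {
  const file = admin.storage().bucket().file('my-images/' + fileName);
  await file.save(fileBuffer, {metadata: metadata});
}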

App Engine doesn't recognize a JSON file

I'm new to this kind of platform (cloud) and I have a problem with App Engine.
I have the following project structure in App Engine:
Root
  nodejs-docs-samples
  src
    be-nodejs-piloto
      node_modules
      api.js (api post uri)
      app.js (main)
      app.yaml
      datastore-quickstart.json (datastore key client)
      package-lock.json
      package.json
vibrant-tree-191664
app.yaml (content)

runtime: nodejs
env: flex

manual_scaling:
  instances: 1
resources:
  cpu: 1
  memory_gb: 0.5
  disk_size_gb: 10
api.js (content)

'use strict';

// Load libs.
var express = require('express');
var router = express.Router();
const Datastore = require('@google-cloud/datastore'); // Imports the Google Cloud client lib

// Your Google Cloud Platform project ID
const projectId = 'datastore-quickstart-191515';
const keyFilename = '/home/testcloud99/src/be-nodejs-piloto/datastore-quickstart-5672f2cde8ca.json';
console.log('keyFilename:' + keyFilename);

// Creates a client
const datastore = new Datastore({
  projectId: projectId,
  keyFilename: keyFilename
});

router.route('/api/piloto')
  .post(function (req, res) {
    console.log('method: POST');

    // Read params
    var pMsgId = req.body.msgId;

    const query = datastore.createQuery('MyEntity');
    query.filter('msgId', '=', pMsgId);

    // exec query
    datastore
      .runQuery(query)
      .then(results => {
        // OK
        return res.status(200).jsonp({
          "piloto": {
            "code": 0,
            "desc": "ok",
          }
        });
      })
      .catch(err => {
        console.error('ERROR:', err);
        return res.status(200).jsonp({
          "piloto": {
            "code": 1,
            "desc": "error",
            "errorMessage": err.message
          }
        });
      });
  });

module.exports = router;
So, when I send a POST message (using soapUI), I get this response:
{"piloto": {
"code": 1,
"desc": "error",
"errorMessage": "ENOENT: no such file or directory, open '/home/testcloud99/src/be-nodejs-piloto/datastore-quickstart-5672f2cde8ca.json'"
}}
I guess App Engine isn't recognizing that JSON file, but I don't know why. Is there any kind of configuration that should be done?
PS: I have also tried setting the GOOGLE_APPLICATION_CREDENTIALS environment variable and calling the Datastore constructor without the keyFilename parameter, and I got the same result.
Hope you can help me.
Regards.
The problem is this definition:
const keyFilename = '/home/testcloud99/src/be-nodejs-piloto/datastore-quickstart-5672f2cde8ca.json';
You can't use absolute file paths from your local machine; that filesystem doesn't exist on the cloud machine. You have to use paths relative to your application's directory, which is the one where the application's app.yaml file exists, in your case /home/testcloud99/src/be-nodejs-piloto. Try this instead:
const keyFilename = 'datastore-quickstart.json';
Note that I updated the filename as well, since according to your directory structure there is no datastore-quickstart-5672f2cde8ca.json file in there. Check that it's indeed the file you want.
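A minimal sketch of the adjusted client creation, assuming datastore-quickstart.json is deployed next to app.yaml (resolving the path relative to the module keeps it working both locally and on App Engine):

const path = require('path');
const Datastore = require('@google-cloud/datastore');

const datastore = new Datastore({
  projectId: 'datastore-quickstart-191515',
  // Relative to the application directory that gets deployed.
  keyFilename: path.join(__dirname, 'datastore-quickstart.json'),
});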
It is worth pointing out that the official Google documentation (2021) shows the following. For Linux/macOS:

export GOOGLE_APPLICATION_CREDENTIALS="/home/user/Downloads/service-account-file.json"

And for Windows, in PowerShell:

$env:GOOGLE_APPLICATION_CREDENTIALS="KEY_PATH"

Replace KEY_PATH with the path of the JSON file that contains the service account key. Example:

$env:GOOGLE_APPLICATION_CREDENTIALS="C:\Users\username\Downloads\service-account-file.json"

In the command prompt:

set GOOGLE_APPLICATION_CREDENTIALS=KEY_PATH

Replace KEY_PATH with the path of the JSON file that contains the service account key.

Based on the model in the official documentation, we have this example:
// Imports the Google Cloud client library
const {Datastore} = require('@google-cloud/datastore');

// Creates a client
const datastore = new Datastore();

async function quickstart() {
  // The kind for the new entity
  const kind = 'Task';
  // The name/ID for the new entity
  const name = 'sampletask1';
  // The Cloud Datastore key for the new entity
  const taskKey = datastore.key([kind, name]);

  // Prepares the new entity
  const task = {
    key: taskKey,
    data: {
      description: 'Buy milk',
    },
  };

  // Saves the entity
  await datastore.save(task);
  console.log(`Saved ${task.key.name}: ${task.data.description}`);
}

quickstart();
