I'm new to this kind of platform (cloud) and I have a problem with App Engine.
I have the following project structure in App Engine:
Root
  nodejs-docs-samples
  src
    be-nodejs-piloto
      node_modules
      api.js (api post uri)
      app.js (main)
      app.yaml
      datastore-quickstart.json (datastore key client)
      package-lock.json
      package.json
  vibrant-tree-191664
app.yaml (content)

runtime: nodejs
env: flex

manual_scaling:
  instances: 1

resources:
  cpu: 1
  memory_gb: 0.5
  disk_size_gb: 10
api.js (content)

'use strict';

// Load libs.
var express = require('express');
var router = express.Router();
const Datastore = require('@google-cloud/datastore'); // Imports the Google Cloud client lib

// Your Google Cloud Platform project ID
const projectId = 'datastore-quickstart-191515';
const keyFilename = '/home/testcloud99/src/be-nodejs-piloto/datastore-quickstart-5672f2cde8ca.json';
console.log('keyFilename:' + keyFilename);

// Creates a client
const datastore = new Datastore({
  projectId: projectId,
  keyFilename: keyFilename
});

router.route('/api/piloto')
  .post(function (req, res) {
    console.log('method: POST');

    // Read params
    var pMsgId = req.body.msgId;

    const query = datastore.createQuery('MyEntity');
    query.filter('msgId', '=', pMsgId);

    // exec query
    datastore
      .runQuery(query)
      .then(results => {
        // OK
        return res.status(200).jsonp({
          "piloto": {
            "code": 0,
            "desc": "ok"
          }
        });
      })
      .catch(err => {
        console.error('ERROR:', err);
        return res.status(200).jsonp({
          "piloto": {
            "code": 1,
            "desc": "error",
            "errorMessage": err.message
          }
        });
      });
  });

module.exports = router;
So, when I send a POST message (using soapUI), I get this response:
{"piloto": {
"code": 1,
"desc": "error",
"errorMessage": "ENOENT: no such file or directory, open '/home/testcloud99/src/be-nodejs-piloto/datastore-quickstart-5672f2cde8ca.json'"
}}
I guess App Engine isn't recognizing that JSON file, but I don't know why. Is there any kind of configuration that should be done?
P.S. I have also tried setting the GOOGLE_APPLICATION_CREDENTIALS environment variable and calling the Datastore constructor without the keyFilename parameter, and I got the same result.
Hope you can help me.
Regards.
The problem is this definition:
const keyFilename = '/home/testcloud99/src/be-nodejs-piloto/datastore-quickstart-5672f2cde8ca.json';
You can't use absolute file paths from your local machine; that filesystem doesn't exist on the cloud machine. You have to use paths relative to your application's directory, which is the one where the application's app.yaml file exists, in your case /home/testcloud99/src/be-nodejs-piloto. Try this instead:
const keyFilename = 'datastore-quickstart.json';
Note that I updated the filename as well, since your directory structure shows no datastore-quickstart-5672f2cde8ca.json file in there, only datastore-quickstart.json. Check that it's indeed the file you want.
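For reference, here is a minimal sketch of the client construction with a relative path, following the same require style as your api.js. The project ID and file name are taken from the question, and it assumes api.js sits next to app.yaml as in your directory listing; adjust if yours differ:

const path = require('path');
const Datastore = require('@google-cloud/datastore');

// Resolve the key file relative to the application directory (where app.yaml lives),
// so the same code works both locally and on App Engine.
const keyFilename = path.join(__dirname, 'datastore-quickstart.json');

const datastore = new Datastore({
  projectId: 'datastore-quickstart-191515',
  keyFilename: keyFilename
});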
It would be good to point out that the official Google documentation this year (2021) gives the following as the default. For Linux/macOS:
export GOOGLE_APPLICATION_CREDENTIALS="/home/user/Downloads/service-account-file.json"
And for Windows:
For PowerShell:
$env:GOOGLE_APPLICATION_CREDENTIALS="KEY_PATH"
Replace KEY_PATH with the path of the JSON file that contains the service account key.
Example:
$env:GOOGLE_APPLICATION_CREDENTIALS="C:\Users\username\Downloads\service-account-file.json"
For command prompt:
set GOOGLE_APPLICATION_CREDENTIALS=KEY_PATH
Replace KEY_PATH with the path of the JSON file that contains the service account key.
Based on the model in the official documentation, we have this example:
// Imports the Google Cloud client library
const {Datastore} = require('@google-cloud/datastore');

// Creates a client
const datastore = new Datastore();

async function quickstart() {
  // The kind for the new entity
  const kind = 'Task';
  // The name/ID for the new entity
  const name = 'sampletask1';
  // The Cloud Datastore key for the new entity
  const taskKey = datastore.key([kind, name]);

  // Prepares the new entity
  const task = {
    key: taskKey,
    data: {
      description: 'Buy milk',
    },
  };

  // Saves the entity
  await datastore.save(task);
  console.log(`Saved ${task.key.name}: ${task.data.description}`);
}

quickstart();
Related
I'm trying to integrate AWS Transcribe into my Node.js application. AWS S3 and Polly work fine, but AWS Transcribe does not. I'm using AWS's example code.
When I try to start a transcription job using the AWS example code, I receive the following error: The requested module './libs/transcribeClient.js' does not provide an export named 'transcribeClient'
That was also the only file where I received the error that require is not defined. I wonder why it only happens with AWS Transcribe and not with the other services as well. I'm also able to start a transcription job via the AWS CLI.
This AWS Transcribe code does not work - transcribeClient.js:
const AWS_BUCKET_NAME = "X"
const AWS_REGION = "eu-central-1"
const AWS_ACCESS_KEY = "XXX"
const AWS_SECRET_KEY = "XXX"

// snippet-start:[transcribe.JavaScript.createclientv3]
const { TranscribeClient } = require('@aws-sdk/client-transcribe');
// Create an Amazon Transcribe service client object.
const transcribeClient = new TranscribeClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { transcribeClient };
This AWS Polly code works - pollyClient.js:
const AWS_BUCKET_NAME = "X"
const AWS_REGION = "eu-central-1"
const AWS_ACCESS_KEY = "XXX"
const AWS_SECRET_KEY = "XXX"

// snippet-start:[polly.JavaScript.createclientv3]
const { PollyClient } = require("@aws-sdk/client-polly");
// Create an Amazon Polly service client object.
const pollyClient = new PollyClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { pollyClient };
I'm looking forward to hearing from you! Thanks!
I solved it. Now it's working with my Node.js 12 environment.
package.json
I changed "type": "modules" to "type": "commonjs".
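For reference, the relevant part of package.json would then look roughly like this (the package name and dependency versions are placeholders, not from the original post):

{
  "name": "my-app",
  "type": "commonjs",
  "dependencies": {
    "@aws-sdk/client-polly": "^3.0.0",
    "@aws-sdk/client-transcribe": "^3.0.0"
  }
}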
transcribeClient.js needs to look like this:
Here I changed export to module.exports.
const { TranscribeClient } = require("@aws-sdk/client-transcribe");

const transcribeClient = new TranscribeClient({ AWS_REGION, AWS_ACCESS_KEY, AWS_SECRET_KEY });
module.exports = { transcribeClient };
transcribe_create_job.js needs to look like this:
Here I changed the import statement to require.
const { StartTranscriptionJobCommand } = require("@aws-sdk/client-transcribe");
const { transcribeClient } = require("./libs/transcribeClient.js");

// Set the parameters
const params = {
  TranscriptionJobName: "test123",
  LanguageCode: "en-GB", // For example, 'en-US'
  MediaFormat: "webm", // For example, 'wav'
  Media: {
    MediaFileUri: "https://x.s3.eu-central-1.amazonaws.com/dlpasiddi.webm",
  },
};

const run = async () => {
  try {
    const data = await transcribeClient.send(
      new StartTranscriptionJobCommand(params)
    );
    console.log("Success - put", data);
    return data; // For unit tests.
  } catch (err) {
    console.log("Error", err);
  }
};
run();
I'm new to uploading files. I want to upload images of different products into Firebase Storage, along with other files required in the app. One product can have many images, so I want to create a folder for every product, where the name of the folder is the id of the product.
In code, I use the @google-cloud/storage library to upload files into Firebase Storage, but I've searched the documentation and found no way to create a folder and then upload into that folder.
Here is my code:
I create a multer middleware to pass into an endpoint, with a check on the file type.
const express = require("express");
const Multer = require("multer");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({
  projectId: process.env.PROJECT_FIREBASE_ID,
  keyFilename: "hawat-service.json",
});

const bucket = storage.bucket(process.env.BUCKET_NAME);

const multer = Multer({
  storage: Multer.memoryStorage(),
  fileFilter: (req, file, cb) => {
    checkFileType(req, file, cb);
  }
});

const checkFileType = (req, file, cb) => {
  if (file.fieldname == 'cover' || file.fieldname == 'images') {
    if (!file.originalname.match(/\.(jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)$/)) {
      req.error = new Error("Only images are allowed");
      return cb(null, false);
    }
  } else if (file.fieldname == 'card' || file.fieldname == 'licence') {
    if (!file.originalname.match(/\.(pdf|jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)$/)) {
      req.error = new Error("Only images and pdf are allowed");
      return cb(null, false);
    }
  }
  return cb(null, true);
};

module.exports = (req, res, next) => {
  return multer.fields([
    { name: 'cover', maxCount: 1 },
    { name: 'images', maxCount: 5 },
    { name: 'card', maxCount: 1 },
    { name: 'licence', maxCount: 1 }
  ])(req, res, () => {
    if (req.error) return res.status(400).send({ message: req.error.message });
    next();
  });
};
The function that uploads a file is:
const express = require("express");
const Multer = require("multer");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage({
  projectId: process.env.PROJECT_FIREBASE_ID,
  keyFilename: "hawat-service.json",
});

const bucket = storage.bucket(process.env.BUCKET_NAME);

module.exports = {
  upload: async (file) => {
    return new Promise((resolve, reject) => {
      let newFileName = `${file.originalname}_${Date.now()}`;
      let fileUpload = bucket.file(newFileName);

      const createStream = fileUpload.createWriteStream({
        metadata: {
          contentType: file.mimetype
        }
      });

      createStream.on('error', (error) => {
        console.log("error in uploading is", error);
        reject('Something is wrong! Unable to upload at the moment.');
      });

      createStream.on('finish', () => {
        // The public URL can be used to directly access the file via HTTP.
        const url = `https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`;
        // storage.bucket(process.env.BUCKET_NAME).file(fileUpload.name).makePublic();
        resolve(url);
      });

      createStream.end(file.buffer);
    });
  }
};
The endpoint is:
router.post('/add-product' , auth, multer , seller.onAddProduct)
The function onAddProduct can receive multiple files from the user.
So how can I create a folder for every product, and then upload files into that folder?
Also, how can I delete the folder after creating it?
I am not using the same method you are using, but you could use my solution as a case study:
await storage.bucket(bucketName).upload(filename, {
  destination: "{Foldername}/{Filename}",
});
Folders in Google Cloud Storage are not really a thing. As you can see in this documentation:
gsutil provides the illusion of a hierarchical file tree atop the "flat" name space supported by the Cloud Storage service. To the service, the object gs://your-bucket/abc/def.txt is just an object that happens to have "/" characters in its name. There is no "abc" directory, just a single object with the given name.
So what you see as a folder in Cloud Storage is simply another object that is emulating a folder structure, what really matters are the object paths.
In your case there are 2 ways you can go about what you want to do, you can either:
Create an emulated empty directory by creating an object whose name ends in a trailing slash. For example, to create a subdirectory called foo at the root of a bucket, you would create an empty object (size 0) called foo/ and then upload the file with its full path.
Simply upload the file with its full path, including the desired "subdirectory", and when you fetch it from GCS it will look like it is located in the emulated directory.
Personally I would use the latter, as you will achieve the same results with only 1 step instead of 2 (see the sketch below).
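A minimal sketch of that second approach, using placeholder names (the bucket name, the products/ prefix, and the helper functions are illustrative, not from the original post):

const { Storage } = require("@google-cloud/storage");

const storage = new Storage();
const bucket = storage.bucket("my-bucket"); // placeholder bucket name

// Upload straight into an emulated "folder" by giving the object its full path.
async function uploadProductImage(productId, localPath, fileName) {
  await bucket.upload(localPath, {
    destination: `products/${productId}/${fileName}`,
  });
}

// "Deleting the folder" is just deleting every object that shares the prefix.
async function deleteProductFolder(productId) {
  await bucket.deleteFiles({ prefix: `products/${productId}/` });
}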
If you want to create an empty folder in Cloud Storage, you can do this:
const userId = "your_user_id"
// Folder name. Notice the slash at the end of the path
const folderName = `users/${userId}/`;
// Create a folder
await bucket.file(folderName).save("");
After creating the new folder, you can upload your file there by setting its destination:
const destination = `${folderName}${fileName}`;
await bucket.upload(file, {
destination,
})
But actually you don't need to create a folder as a separate step. You can just set full destination for your file in bucket.upload(...) as described above.
I am trying to create a GCP Storage Bucket using the Node.js library.
I've been using the steps here:
https://cloud.google.com/storage/docs/creating-buckets#storage-create-bucket-nodejs
And code pasted below.
The challenge is that my bucket keeps being created in the wrong project. My project is set in my gcloud cli, it's set in my node environment, and it's set in my script.
Is there some way to set the project in the values you pass to the library's createBucket function?
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const storageClass = 'Name of a storage class, e.g. coldline';
// const location = 'Name of a location, e.g. ASIA';

// Imports the Google Cloud client library
const {Storage} = require('@google-cloud/storage');

// Creates a client
const storage = new Storage();

async function createBucketWithStorageClassAndLocation() {
  // For default values see: https://cloud.google.com/storage/docs/locations and
  // https://cloud.google.com/storage/docs/storage-classes
  const [bucket] = await storage.createBucket(bucketName, {
    location,
    [storageClass]: true,
  });

  console.log(
    `${bucket.name} created with ${storageClass} class in ${location}.`
  );
}

createBucketWithStorageClassAndLocation();
You can specify projectId when you initialize the Storage class:
const storage = new Storage({
  projectId: 'my-project-id',
  keyFilename: '/path/to/keyfile.json'
});
Source
I am utilising the Cloud Vision API "PDF to Text" function for a Node.js app. I have mostly stuck close to the GCP docs example, with a couple of tweaks here and there: https://cloud.google.com/vision/docs/pdf
All works fine; however, I would like the contents of the file to be returned to me as a JSON object so I can pass it into another function, rather than its current behaviour of writing the contents to a JSON file and storing it on Cloud Storage.
Does anyone know how I need to structure the outputConfig object in order to achieve this?
async function detectPdfText(bucketName, fileName) {
  // Imports the Google Cloud client libraries
  const vision = require('@google-cloud/vision').v1;

  // Creates a client
  const client = new vision.ImageAnnotatorClient({
    keyFilename: './APIKey.json'
  });

  // The directory to store the results
  const outputPrefix = 'json_output';

  const gcsSourceUri = `gs://${bucketName}/${fileName}`;
  const gcsDestinationUri = `gs://${bucketName}/${outputPrefix}/`;

  const inputConfig = {
    // Supported mime_types are: 'application/pdf' and 'image/tiff'
    mimeType: 'application/pdf',
    gcsSource: {
      uri: gcsSourceUri,
    },
  };

  const outputConfig = {
    gcsDestination: {
      uri: gcsDestinationUri,
    },
  };

  const features = [{type: 'DOCUMENT_TEXT_DETECTION'}];

  const request = {
    requests: [
      {
        inputConfig: inputConfig,
        features: features,
        outputConfig: outputConfig,
      },
    ],
  };

  const [operation] = await client.asyncBatchAnnotateFiles(request);
  const [filesResponse] = await operation.promise();

  const destinationUri =
    filesResponse.responses[0].outputConfig.gcsDestination.uri;
  console.log(`Json output for file ${fileName} has been saved to: ${destinationUri}`);
}

module.exports = { detectPdfText };
You are using asyncBatchAnnotateFiles, which only writes the output to GCS: https://cloud.google.com/vision/docs/reference/rpc/google.cloud.vision.v1#outputconfig . There is no option to return the result of the LRO in the GetOperation call.
You could instead use batchAnnotateFiles and get the results synchronously, then convert them to JSON (see the sketch below).
If you have to use asyncBatchAnnotateFiles, then you would have to download the GCS file after the LRO finishes.
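A minimal sketch of the synchronous route with batchAnnotateFiles, assuming the same bucket, file, and key-file setup as in the question (note that synchronous file annotation only processes a handful of pages per request, so this only fits small PDFs):

const vision = require('@google-cloud/vision').v1;

async function detectPdfTextSync(bucketName, fileName) {
  const client = new vision.ImageAnnotatorClient({
    keyFilename: './APIKey.json' // same key file as in the question
  });

  const request = {
    requests: [
      {
        inputConfig: {
          mimeType: 'application/pdf',
          gcsSource: { uri: `gs://${bucketName}/${fileName}` },
        },
        features: [{ type: 'DOCUMENT_TEXT_DETECTION' }],
        // Synchronous file annotation handles only a few pages per request.
        pages: [1, 2, 3, 4, 5],
      },
    ],
  };

  const [result] = await client.batchAnnotateFiles(request);

  // One entry per annotated page; collect the recognised text as plain strings.
  const pageResponses = result.responses[0].responses;
  return pageResponses.map(r => (r.fullTextAnnotation ? r.fullTextAnnotation.text : ''));
}

module.exports = { detectPdfTextSync };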
I have a simple GET request built using Node and Express to retrieve data from Datastore. I am not able to get back the results; the 'get' request's async call is stuck. Not sure what is happening.
const express = require('express');
const {Datastore} = require('@google-cloud/datastore');

const app = express();

// Your Google Cloud Platform project ID
const projectId = 'xxx';

// Creates a client
const datastore = new Datastore({
  projectId: projectId,
  keyFilename: '/Masters-LUC/spring-2019/internship/service-keys/xxx.json'
});

const query = datastore
  .createQuery('approvals')
  .filter('status', '=', 'yes');

app.get("/api/get", (req, res, next) => {
  query.run().then(([documents]) => {
    documents.forEach(doc => console.log(doc));
  });
});

module.exports = app;
I rewrote the same logic using an async function. The code below is working. Why not the above?
// Retrieve data from datastore
async function quickStart() {
  // Your Google Cloud Platform project ID
  const projectId = 'xxx';

  // Creates a client
  const datastore = new Datastore({
    projectId: projectId,
    keyFilename: '/Masters-LUC/spring-2019/internship/service-keys/xxx.json'
  });

  const query = datastore
    .createQuery('approvals')
    .filter('status', '=', 'yes');

  const [approvals] = await datastore.runQuery(query);

  console.log('Tasks:');
  approvals.forEach(task => console.log(task));
}

quickStart().catch(console.error);
There are two things I notice that are different between your two functions. In the first, you reuse the query object across function invocations; query objects should not be reused.
The second thing I notice is that you don't use the res object that's passed into your handler in app.get().
Modified working code:
app.get("/api/approvals/", (req, res, next) => {
  // Create the query inside the handler so it is not reused across requests
  const query = datastore.createQuery('approvals');

  query.run().then(([approvals]) => {
    approvals.forEach(appr => console.log(appr)); // This is used to log results on the console for verification

    // Loading results on the response object to be used later by the client
    res.status(200).json({
      message: "Request was processed successfully!",
      approvals: approvals
    });
  });
});