I currently have the following code to fetch Google Places matching a received query:
async function searchGoogleBusiness(req, res) {
    let { name } = req.query;
    const apiKey = process.env.API_KEY;
    const searchUrl = `https://maps.googleapis.com/maps/api/place/textsearch/json?query=`;
    try {
        let { data } = await axios.get(`${searchUrl}${name}&key=${apiKey}`)
        let { status, error_message, results } = data;
        if (status === 'OK') {
            let businessResults = [];
            if ((results ?? []).length > 0) {
                for (let business of results) {
                    let businessDetails = {
                        ....
                    }
                    if ((business.photos ?? []).length > 0) {
                        let { width = 1200, height = 1200, photo_reference } = business.photos[0];
                        let photoUrl = `https://maps.googleapis.com/maps/api/place/photo?photoreference=${photo_reference}&sensor=false&maxheight=${height}&maxwidth=${width}&key=${apiKey}`
                        try {
                            let businessPhotoResponse = await axios.get(photoUrl, { responseType: 'arraybuffer' });
                            let imageBuffer = businessPhotoResponse.data;
                            let base64Image = Buffer.from(imageBuffer, 'binary').toString('base64');
                            businessDetails.photo = `data:${businessPhotoResponse.headers['content-type']};base64,${base64Image}`;
                        } catch (e) {
                            businessDetails.photo = business.icon;
                        }
                    } else {
                        businessDetails.photo = business.icon;
                    }
                    businessResults.push(businessDetails);
                }
            }
            ...//Omitted
        }
        ...//Omitted
    } catch (e) {
        ...//Omitted
    }
}
As you can immediately notice, the function takes forever to return when there are more than 5 results, because I'm looping through each business and making another API call just to fetch its photo.
I don't like this approach at all.
Making an extra network call per photo reference is really hurting my site's speed and basically just makes my users angry.
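The only workaround I can think of so far is to at least fire the photo requests in parallel instead of awaiting them one at a time inside the loop. Here is a rough, untested sketch of what I mean (results, apiKey and axios are the same ones already in scope above, and the fields I actually build in businessDetails are reduced to a placeholder):
// Rough sketch: build one promise per business and await them together.
const businessResults = await Promise.all(results.map(async (business) => {
    const businessDetails = { name: business.name }; // placeholder for the fields I actually build
    const photo = (business.photos ?? [])[0];
    if (!photo) {
        businessDetails.photo = business.icon;
        return businessDetails;
    }
    const { width = 1200, height = 1200, photo_reference } = photo;
    const photoUrl = `https://maps.googleapis.com/maps/api/place/photo?photoreference=${photo_reference}&maxheight=${height}&maxwidth=${width}&key=${apiKey}`;
    try {
        // The photo endpoint redirects to the actual image, which axios follows and returns as a buffer.
        const photoResponse = await axios.get(photoUrl, { responseType: 'arraybuffer' });
        const base64Image = Buffer.from(photoResponse.data).toString('base64');
        businessDetails.photo = `data:${photoResponse.headers['content-type']};base64,${base64Image}`;
    } catch (e) {
        businessDetails.photo = business.icon; // fall back to the place icon if the photo fetch fails
    }
    return businessDetails;
}));
But even in parallel, that is still one extra round trip per result.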
Is there no way to get the photo URLs back in the first request itself?
I am doing file update operations using the GitHub API. For example, let's say I have a file called App.js and I am updating its first line. While updating, I pass the sha.
When I try to update another line in the same file, the newly updated sha does not come back; I get the older sha instead. I am not doing any of these steps manually; I have written code for all of these operations. Let me paste the code for context:
var cosmos = [
    {
        "_id": "63773144c3160f782c087e35",
        "nfrid": "637328ebf5c4b2558b064809",
        "nfrname": "azuread",
        "fileName": "index.js",
        "isImport": true,
        "isConst": false,
        "isComponent": false,
        "isNewFile": false,
        "isPackage": false,
        "landmark": null,
        "isAfter": null,
        "fileContent": "import { MsalProvider } from '@azure/msal-react';import { msalConfig } from './authConfig';import { PublicClientApplication } from '@azure/msal-browser';",
        "filePath": "src/index.js",
        "isIndexHtml": false,
        "projecttypeid": "6372366d1b568e00d8af2e44",
        "projecttypetitle": "PWA React",
        "isReplace": false
    },
    {
        "_id": "637731a2c3160f782c087e37",
        "nfrid": "637328ebf5c4b2558b064809",
        "nfrname": "azuread",
        "fileName": "index.js",
        "isImport": false,
        "isConst": false,
        "isComponent": true,
        "isNewFile": false,
        "isPackage": false,
        "landmark": "<App />",
        "isAfter": null,
        "fileContent": "<MsalProvider instance={msalInstance}><App /></MsalProvider>",
        "filePath": "src/index.js",
        "isIndexHtml": false,
        "projecttypeid": "6372366d1b568e00d8af2e44",
        "projecttypetitle": "PWA React",
        "isReplace": true
    }
];
for (let i = 0; i < cosmos.length; i++) {
    switch (true) {
        case cosmos[i].isImport:
            const statusImport = common.updateImport(cosmos[i]);
            console.log(statusImport);
            break;
        case cosmos[i].isConst:
            const statusConst = common.updateConst(cosmos[i]);
            console.log(statusConst);
            break;
        case cosmos[i].isPackage:
            const statusPackage = common.updatePackage(cosmos[i]);
            console.log(statusPackage);
            break;
        case cosmos[i].isIndexHtml:
            const statusIndexHtml = common.updateIndexHTML(cosmos[i]);
            console.log(statusIndexHtml);
            break;
        case cosmos[i].isNewFile:
            const statusNewFile = common.addNewFile(cosmos[i]);
            console.log(statusNewFile);
            break;
        case cosmos[i].isComponent:
            const statusComponent = common.updateComponent(cosmos[i]);
            console.log(statusComponent);
            break;
        default:
            console.log("Nothing to add/update");
            break;
    }
}
I will be updating both of these things in the index.js file only. The first item gets updated properly, but when the second one runs, it fails with an error like "src/index.js does not match with sha-id". I checked: that sha is not the updated one; it changes after the first update happens, and that is when I get the error. I will also paste the code logic I wrote for updating files.
async function updateImport(cosmos) {
    let temp = cosmos.filePath;
    let newSha = "";
    let newContent = "";
    let decodedContent = "";
    const octokit = new Octokit({
        auth: "ghp_auth-token"
    });
    try {
        return await new Promise(async (resolve, reject) => {
            const response = await octokit.request(
                `GET /repos/JaspreetAhden24/TestingGitHubIO/contents/${temp}`
            );
            newSha = response.data.sha;
            decodedContent = atob(response.data.content);
            if (cosmos.isImport === true) {
                newContent = cosmos.fileContent + decodedContent;
            }
            let encodedContent = Buffer.from(newContent).toString("base64");
            const update = await octokit.request(
                `PUT /repos/JaspreetAhden24/TestingGitHubIO/contents/${temp}`,
                {
                    message: "Updating import statements",
                    content: encodedContent,
                    sha: newSha
                }
            );
            console.log(update.status);
            resolve("success");
        });
    } catch (error) {
        console.log(error);
    }
}
async function updateComponent(cosmos) {
    let temp = cosmos.filePath;
    let newSha = "";
    let newContent = "";
    let decodedContent = "";
    const octokit = new Octokit({
        auth: "ghp_auth token"
    });
    try {
        return await new Promise(async (resolve, reject) => {
            const response = await octokit.request(
                `GET /repos/JaspreetAhden24/TestingGitHubIO/contents/${temp}`
            );
            newSha = response.data.sha;
            decodedContent = atob(response.data.content);
            if (cosmos.isComponent === true) {
                if (cosmos.isAfter === false) {
                    newContent = decodedContent.replace(
                        cosmos.landmark,
                        cosmos.fileContent + cosmos.landmark
                    );
                } else if (cosmos.isAfter === true) {
                    newContent = decodedContent.replace(
                        cosmos.landmark,
                        cosmos.landmark + cosmos.fileContent
                    );
                } else if (cosmos.isReplace === true) {
                    newContent = decodedContent.replace(
                        cosmos.landmark,
                        cosmos.fileContent
                    );
                }
            }
            // var formattedContent = prettier.format(newContent, { semi: false, parser: "mdx" });
            let encodedMDContent = Buffer.from(newContent).toString("base64");
            const update = await octokit.request(
                `PUT /repos/JaspreetAhden24/TestingGitHubIO/contents/${temp}`,
                {
                    message: "Updating components",
                    content: encodedMDContent,
                    sha: newSha
                }
            );
            console.log(update.status);
            resolve("success");
        });
    } catch (error) {
        console.log(error);
    }
}
I need to know why the latest updated sha is not coming through when the second update happens.
I need to know whether the problem is in the switch case, or whether the new updated sha simply will not be available to the second update function.
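For reference, this is roughly how I understand the Contents API flow has to work: every PUT creates a new commit, so the next update has to GET the file again to pick up the fresh sha before it can PUT. This is only a rough sketch with a hypothetical updateFile helper, not my real code:
// Hypothetical helper: read the file, apply a transform, and write it back with the sha from that same read.
async function updateFile(octokit, path, message, transform) {
    const { data } = await octokit.request(
        `GET /repos/JaspreetAhden24/TestingGitHubIO/contents/${path}`
    );
    const decoded = Buffer.from(data.content, "base64").toString("utf8");
    const newContent = transform(decoded);
    await octokit.request(
        `PUT /repos/JaspreetAhden24/TestingGitHubIO/contents/${path}`,
        {
            message,
            content: Buffer.from(newContent).toString("base64"),
            sha: data.sha // sha from the GET above, i.e. the latest commit at that moment
        }
    );
}
I want to confirm whether two updates reading the sha before either of them has written could produce exactly this mismatch error.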
I'm a little bit confused about promises. First, I have some ugly code like this:
async function presence(ctx) {
    try {
        var presenceData = [];
        var isSuccess = Boolean(false);
        var ckFilePath = "./somepath/cookie.json";
        if (!fs.existsSync(ckFilePath)) {
            await menuLogin.login(ctx).then(login => {
                isSuccess = Boolean(login[0].status);
                myCk.saveCookies(login[0].cookies, ckFilePath);
                if (!isSuccess) {
                    myCk.deleteCookies(ckFilePath);
                    return false;
                }
            });
        } else {
            await myCk.checkToDelete(ckFilePath).then(isDel => {
                if (isDel) {
                    return false;
                }
            });
        }
        await presenceNow.check(fs.existsSync(ckFilePath), ctx).then(data => {
            for (let id = 0; id < data[0].pesan.length; id++) {
                console.log(data[0].pesan[id]);
            }
            for (let id = 0; id < data[0].id.length; id++) {
                presenceData.push(data[0].id);
            }
            if (data[0].pesan.length == 0 && fs.existsSync(ckFilePath)) {
                myCk.deleteCookies(ckFilePath);
            }
        });
    } catch (e) {
        console.log(e);
    }
    return presenceData;
}
Can anyone explain why the presenceNow.check() function is not being called if ckFilePath does not exist, while when ckFilePath does exist my code runs fine? And maybe someone can show me better code for this case? Thanks.
Mixing async/await and promise chains like this is something of a code smell that suggests the author lacked an understanding of async/await. It's also something of a mixed metaphor.
If you refactor it to actually use async/await, you get something like the code below, which is a lot easier to understand.
My suspicion is that your presenceNow.check() method is not being called because the function is returning early via one of the two return paths before it:
the file exists and myCk.checkToDelete() returns true, or
the file does not exist, and the login is unsuccessful.
const fs = require('fs/promises');

// fs.access() resolves with undefined on success and rejects when the file is
// not accessible, so map it to a boolean before using it in conditionals.
const exists = (path) => fs.access(path).then(() => true, () => false);

async function presence(ctx) {
    var presenceData = [];
    var ckFilePath = "./somepath/cookie.json";
    let ckFilePathExists = await exists(ckFilePath);
    if (ckFilePathExists) {
        const isDel = await myCk.checkToDelete(ckFilePath);
        if (isDel) {
            return false;
        }
    } else {
        const login = await menuLogin.login(ctx);
        const isSuccess = login[0].status;
        myCk.saveCookies(login[0].cookies, ckFilePath);
        if (!isSuccess) {
            myCk.deleteCookies(ckFilePath);
            return false;
        }
    }
    ckFilePathExists = await exists(ckFilePath);
    const data = await presenceNow.check(ckFilePathExists, ctx);
    for (let id = 0; id < data[0].pesan.length; id++) {
        console.log(data[0].pesan[id]);
    }
    for (let id = 0; id < data[0].id.length; id++) {
        presenceData.push(data[0].id);
    }
    if (data[0].pesan.length == 0 && await exists(ckFilePath)) {
        myCk.deleteCookies(ckFilePath);
    }
    return presenceData;
}
I have an API in NestJS which is not sending data on the first hit. However, on hitting it again, it sends the desired data. I am guessing the API returns before the internal processing is done.
How do I stop this? Is sleep a good option for this?
Or is there any other way to do this?
#Post("load")
#UseGuards(AuthGuard("jwt"))
async load(#Req() body: any)
{
const organizationId = body.user.organizationId;
const userId = body.user.userId;
if ("brandIds" in body.body)
{
await this.userService.onBoardUser(userId);
}
var settings = await this.settingsService.fetchLayout(organizationId, "home");
settings.forEach(async (element) =>
{
var parsedElement = JSON.parse(JSON.stringify(element));
var innerContent = await this.fetchContent(parsedElement.method, organizationId, userId);
var template = parsedElement.content[0];
let formattedItem = {};
innerContent.forEach((item) =>
{
try
{
formattedItem = template;
Object.keys(template).forEach((key) =>
{
if (template[key]!= "" && key != "type")
{
formattedItem[key] = eval(template[key]);
}
});
parsedElement.content.push(formattedItem);
formattedItem = null;
}
catch(err)
{
}
});
this.response.data.push(parsedElement);
innerContent = null;
template = null;
formattedItem = null;
parsedElement = null;
});
return(this.response);
}
It looks like your main problem here is that you're using async/await inside forEach, which doesn't work the way you expect.
Use it like this:
for (const setting of settings) {
    ... your async code here.
}
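Roughly, the difference looks like this (a minimal sketch; doSomethingAsync is just a hypothetical stand-in for your fetchContent and formatting work):
// forEach does NOT wait for async callbacks: the surrounding function can return
// before any of these promises settle, which is why the first hit comes back empty.
settings.forEach(async (element) => {
    await doSomethingAsync(element);
});

// for...of awaits each iteration, so code after the loop only runs once every item is done.
for (const element of settings) {
    await doSomethingAsync(element);
}
If the iterations are independent of each other, you could also collect the promises and await Promise.all(settings.map(...)) so they run concurrently instead of one after another.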
I want to get the complete log of a particular participant. I was following the instructions from this answer, and below is the code. It throws an error in Composer Playground: "Native API not available in web runtime".
async function participantHistory(tx) {
    const partId = tx.tradeid;
    const nativeSupport = tx.nativeSupport;
    // const partRegistry = await getParticipantRegistry('org.example.trading.Trader')
    const nativeKey = getNativeAPI().createCompositeKey('Asset:org.example.trading.Trader', [partId]);
    const iterator = await getNativeAPI().getHistoryForKey(nativeKey);
    let results = [];
    let res = { done: false };
    while (!res.done) {
        res = await iterator.next();
        if (res && res.value && res.value.value) {
            let val = res.value.value.toString('utf8');
            if (val.length > 0) {
                console.log("#debug val is " + val);
                results.push(JSON.parse(val));
            }
        }
        if (res && res.done) {
            try {
                iterator.close();
            } catch (err) {
            }
        }
    }
    var newArray = [];
    for (const item of results) {
        newArray.push(getSerializer().fromJSON(item));
    }
    console.log("#debug the results to be returned are as follows: ");
    return newArray; // returns something to my NodeJS client (called via REST API)
}
It is also mentioned in the documentation under "Calling Hyperledger Fabric APIs in transaction processor functions".
Please help me understand how to use this getNativeAPI.
I want to store an ordered list of image paths in a Firestore document. The images are uploaded to storage with metadata containing the position and a tag linking to a document. The number of images to expect is known. The images are uploaded nearly simultaneously.
Right now, I do this by storing the list of image paths as an array.
In short: when an image is uploaded, a document is read inside a transaction, the array is altered, and the document is saved again. The transaction never fails, but sometimes an array item disappears again after another image is uploaded.
My two questions are: is an array the best structure in a NoSQL database for this problem? And why do array items disappear despite the transaction?
export const addImageToTag = functions.storage.object().onFinalize(async (object) => {
    try {
        const tag = object.metadata.Tag;
        const position = object.metadata.Position;
        const tagRef = db.collection(COLLECTION_TAGS).doc(tag)
        const tagSnapshot = await db.runTransaction(t => t.get(tagRef));
        const images = tagSnapshot.data().images;
        images[position] = object.name;
        let uploadCount = 0;
        for (let i = 0; i < tagSnapshot.data().imageCount; i++) {
            if (images[i].length > 0) {
                uploadCount++;
            } else {
                break;
            }
        }
        let state;
        if (tagSnapshot.data().imageCount === uploadCount) {
            state = STATE_ACTIVE;
        } else {
            state = STATE_UPLOADING;
        }
        await tagSnapshot.ref.update({
            images: images,
            state: state
        });
        console.log('Image ', position, ' of tag ', tag, 'added')
    } catch (error) {
        console.error('Call failed: ', error)
    }
});
The second question is answered: actually putting the code inside the transaction block did the trick. I had read that referencing the tag document via tagSnapshot.ref was sufficient, which turned out to be false.
export const addImageToTag = functions.storage.object().onFinalize(async (object) => {
    try {
        const tag = object.metadata.Tag;
        const position = object.metadata.Position;
        const tagRef = db.collection(COLLECTION_TAGS).doc(tag)
        await db.runTransaction(async transaction => {
            const tagSnapshot = await transaction.get(tagRef)
            const images = tagSnapshot.data().images;
            images[position] = object.name;
            let uploadCount = 0;
            for (let i = 0; i < tagSnapshot.data().imageCount; i++) {
                if (images[i].length > 0) {
                    uploadCount++;
                } else {
                    break;
                }
            }
            let state;
            if (tagSnapshot.data().imageCount === uploadCount) {
                state = STATE_ACTIVE;
            } else {
                state = STATE_UPLOADING;
            }
            transaction.update(tagRef, {
                images: images,
                state: state
            });
            if (state === STATE_ACTIVE) {
                console.log('All images of tag ', tag, ' were succesfully added')
            }
        });
    } catch (error) {
        console.error('Call failed: ', error)
    }
});