I am trying to leverage @aws-sdk/lib-dynamodb to simplify data retrieval from AWS DynamoDB.
# aws.js
const { DynamoDBClient, ScanCommand } = require('@aws-sdk/client-dynamodb');
const { DynamoDBDocumentClient } = require('@aws-sdk/lib-dynamodb');
const { fromIni } = require('@aws-sdk/credential-provider-ini');

const client = new DynamoDBClient({
  credentials: fromIni({ profile: process.env.AWS_PROFILE }),
  region: process.env.AWS_DEFAULT_REGION,
});

const ddb_client = DynamoDBDocumentClient.from(client);

const listItemsDDB = async (params) => {
  try {
    const command = new ScanCommand(params);
    const data = await ddb_client.send(command);
    return data;
  } catch (err) {
    throw Error(err);
  }
};

module.exports = { listItemsDDB };
I import the function into my rest_api.js.
Everything is based on an Express.js application.
...trimmed...
app.get('/courses/:year/:location/:month', async (req, res, next) => {
  console.log(req.params);
  const payload = {
    TableName: ddb_calendar_name,
    FilterExpression: '#l = :loc and #m = :month and #y = :year',
    ExpressionAttributeValues: {
      ':loc': req.params.location,
      ':month': req.params.month,
      ':year': req.params.year,
    },
    ExpressionAttributeNames: {
      '#l': 'Location',
      '#m': 'Month',
      '#y': 'Year',
    },
  };
  try {
    const result = await aws.listItemsDDB(payload);
    return res.status(200).send(result.Items[0]);
  } catch (error) {
    next(error);
  }
});
My data looks like this:
{
  "Id": {
    "S": "02322f8f-05a7-4e27-aaf9-08129f3128ef"
  },
  "Location": {
    "S": "schildergasse"
  },
  "Month": {
    "S": "august"
  },
  "Year": {
    "S": "2021"
  }
}
I use a REST client to test the endpoints.
After firing GET http://localhost:4000/courses/2021/schildergasse/august I receive:
Error: TypeError: Cannot read property '0' of undefined
at Object.listItemsDDB (/Users/olivergoetz/fifi-calendar/server/aws.js:39:9)
at processTicksAndRejections (node:internal/process/task_queues:96:5)
at async /Users/olivergoetz/fifi-calendar/server/rest_api.js:221:18
What am I missing here?
I imported the ScanCommand from the wrong library:
I had to change this:
const { DynamoDBClient, ScanCommand } = require('@aws-sdk/client-dynamodb');
const { DynamoDBDocumentClient } = require('@aws-sdk/lib-dynamodb');
to this:
const { DynamoDBClient } = require('@aws-sdk/client-dynamodb');
const { DynamoDBDocumentClient, ScanCommand } = require('@aws-sdk/lib-dynamodb');
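A follow-up note (not from the original answer): the ScanCommand from @aws-sdk/lib-dynamodb returns items as plain JavaScript objects rather than raw { S: ... } attribute values, and a Scan can legitimately match nothing, so it is worth guarding the Items[0] access in the route. A minimal sketch of such a guard, reusing the names from the question:
// Hypothetical defensive version of the route shown above.
app.get('/courses/:year/:location/:month', async (req, res, next) => {
  const payload = { /* same TableName, FilterExpression and attribute maps as above */ };
  try {
    const result = await aws.listItemsDDB(payload);
    // Scan may match nothing; guard before indexing into Items.
    if (!result.Items || result.Items.length === 0) {
      return res.status(404).send({ error: 'No matching course found' });
    }
    // With lib-dynamodb the item is already unmarshalled, e.g. { Location: 'schildergasse', ... }
    return res.status(200).send(result.Items[0]);
  } catch (error) {
    next(error);
  }
});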
Trying to write a query for DynamoDB and learn promises, etc. The console.log(resp.Items) logs the object I am looking for, so I think my query is formatted correctly, yet I get a 200 status back with an empty object.
I have read up for a few days and tried various changes to the code, but nothing returns the object resp.Items. I am guessing the function returns before the const is updated with the data, but I am not sure why the console.log works.
const AWS = require('aws-sdk')
const dynamodb = new AWS.DynamoDB()

const getTechs = async () => {
  try {
    const resp = await dynamodb
      .query({
        ExpressionAttributeValues: {
          ':tech': { S: 'TECH#' },
        },
        KeyConditionExpression: 'PK = :tech',
        TableName: process.env.TABLE_NAME,
        ScanIndexForward: true,
      })
      .promise()
    console.log(resp.Items)
    if (!resp.Items) {
      return {
        error: 'No Techs in the DB',
      }
    }
    return {
      tech: resp.Items,
    }
  } catch (error) {
    console.log('Error retrieving Tech List')
    console.log(error)
    return {
      error: 'Could not retrieve Tech List',
    }
  }
}
handler func
const { makeHandler } = require('./utils')
const { getTechs } = require('../data')
// const { Tech } = require('../entities')

const inputSchema = {
  type: 'object',
  properties: {
    pathParameters: {
      type: 'object',
      properties: {
        tech: { type: 'string' },
      },
      required: ['tech'],
    },
  },
  required: ['pathParameters'],
}

const handler = async (event) => {
  const { techs, error } = await getTechs()
  const statusCode = error ? 500 : 200
  const body = error ? JSON.stringify({ error }) : JSON.stringify({ techs })
  return {
    statusCode,
    body,
  }
}

module.exports.handler = makeHandler({ handler })
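One observation on the posted code (an editorial note, not an accepted answer): getTechs resolves to { tech: resp.Items }, while the handler destructures { techs, error }, so techs is always undefined and the response body comes back as an empty object even though the console.log shows the items. Also, a successful Query that matches nothing returns Items: [], which is truthy, so the !resp.Items check never fires. A minimal sketch aligning the keys, assuming the same table and environment variables as above:
const AWS = require('aws-sdk')
const dynamodb = new AWS.DynamoDB()

const getTechs = async () => {
  try {
    const resp = await dynamodb
      .query({
        ExpressionAttributeValues: { ':tech': { S: 'TECH#' } },
        KeyConditionExpression: 'PK = :tech',
        TableName: process.env.TABLE_NAME,
        ScanIndexForward: true,
      })
      .promise()
    // Items is an empty array (not undefined) when nothing matches.
    if (!resp.Items || resp.Items.length === 0) {
      return { error: 'No Techs in the DB' }
    }
    // Return under "techs" so `const { techs, error } = await getTechs()` finds it.
    return { techs: resp.Items }
  } catch (error) {
    console.log('Error retrieving Tech List', error)
    return { error: 'Could not retrieve Tech List' }
  }
}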
executeTransactWrite func
const executeTransactWrite = async ({ tech, params }) => {
  const transactionRequest = tech.transactWriteItems(params)
  let cancellationReasons
  transactionRequest.on('extractError', (response) => {
    try {
      cancellationReasons = JSON.parse(
        response.httpResponse.body.toString()
      ).CancellationReasons
    } catch (err) {
      // suppress this just in case some types of errors aren't JSON parseable
      console.error('Error extracting cancellation error', err)
    }
  })
  return new Promise((resolve, reject) => {
    transactionRequest.send((err, response) => {
      if (err) {
        err.cancellationReasons = cancellationReasons
        return reject(err)
      }
      return resolve(response)
    })
  })
}

module.exports = {
  executeTransactWrite,
}
makeHandler func
const middy = require('middy')
const {
  jsonBodyParser,
  validator,
  httpErrorHandler,
} = require('middy/middlewares')

const makeHandler = ({ handler, inputSchema }) =>
  middy(handler)
    .use(jsonBodyParser())
    .use(validator({ inputSchema }))
    .use(httpErrorHandler())

module.exports = { makeHandler }
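A second small observation (also an editorial note): inputSchema is defined in the handler file but never passed to makeHandler, so the validator middleware receives inputSchema: undefined. If the schema is meant to apply, the export would presumably need to pass it through:
// Hypothetical: pass the schema along so middy's validator actually sees it.
module.exports.handler = makeHandler({ handler, inputSchema })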
I have a problem with my API that serves metadata when it is called from my smart contract or website. It's for NFT tokens; my database is Postgres and the API is Node.js.
The problem is that when I mint 1 NFT the metadata works perfectly, but if I mint 2 or more, only one chunk of data is ever sent, so only 1 NFT mints properly and the rest have no data.
Do I need to set up a loop or a delay? Does anyone have any experience with this?
Any help would be much appreciated.
Below is the code from the "controller" folder labeled "nft.js"
const models = require("../../models/index");
const path = require("path");
const fs = require("fs");

module.exports = {
  create_nft: async (req, res, next) => {
    try {
      const dir = path.resolve(__dirname + `../../../data/traitsfinal.json`);
      const readCards = fs.readFileSync(dir, "utf8");
      const parsed = JSON.parse(readCards);
      console.log("ya data ha final ??", parsed);
      parsed.forEach(async (item) => {
        // return res.json(item)
        let newNft = await models.NFT.create({
          name: item.Name,
          description: item.Description,
          background: item.Background,
          body: item.Body,
          mouth: item.Mouth,
          eyes: item.Eyes,
          head_gear: item.Head_Gear,
          tokenId: item.tokenId,
          image: item.imagesIPFS,
        });
      });
      return res.json({
        data: "nft created",
        error: null,
        success: true,
      });
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
  get_nft: async (req, res, next) => {
    try {
      const { id } = req.params;
      // console.log("id ?????????",id)
      // console.log("type of ",typeof(id))
      // const n=Number(id)
      // console.log("type of ",typeof(id))
      const nft = await models.NFT.findByPk(id);
      if (!nft) {
        throw new Error("Token ID invalid");
      }
      if (!nft.isMinted) {
        throw new Error("Token not minted");
      }
      console.log(nft);
      // }
      const resObj = {
        name: nft.name,
        description: nft.description,
        image: `https://gateway.pinata.cloud/ipfs/${nft.image}`,
        attributes: [
          { trait_type: "background", value: `${nft.background}` },
          { trait_type: "body", value: `${nft.body}` },
          { trait_type: "mouth", value: `${nft.mouth}` },
          { trait_type: "eyes", value: `${nft.eyes}` },
          { trait_type: "tokenId", value: `${nft.tokenId}` },
          {
            display_type: "number",
            trait_type: "Serial No.",
            value: id,
            max_value: 1000,
          },
        ],
      };
      return res.json(resObj);
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
  get_nft_all: async (req, res, next) => {
    try {
      // console.log("id ?????????",id)
      // console.log("type of ",typeof(id))
      // const n=Number(id)
      // console.log("type of ",typeof(id))
      const nft = await models.NFT.findAndCountAll({
        limit: 10
      });
      // console.log(nft);
      if (!nft) {
        throw new Error("Token ID invalid");
      }
      // if (nft.isMinted) {
      //   throw new Error("Token not minted");
      // }
      // console.log(nft);
      // }
      var resObjarr = [];
      for (var i = 0; i < nft.rows.length; i++) {
        resObj = {
          name: nft.rows[i].name,
          description: nft.rows[i].description,
          image: `https://gateway.pinata.cloud/ipfs/${nft.rows[i].image}`,
          attributes: [
            { trait_type: "background", value: `${nft.rows[i].background}` },
            { trait_type: "body", value: `${nft.rows[i].body}` },
            { trait_type: "mouth", value: `${nft.rows[i].mouth}` },
            { trait_type: "eyes", value: `${nft.rows[i].eyes}` },
            { trait_type: "tokenId", value: `${nft.rows[i].tokenId}` },
            {
              display_type: "number",
              trait_type: "Serial No.",
              value: nft.rows[i].id,
              max_value: 1000,
            },
          ],
        };
        resObjarr.push(resObj);
      }
      console.log(JSON.stringify(resObjarr))
      return res.json(resObjarr);
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
  mint: async (req, res, next) => {
    try {
      const { id } = req.params;
      const updated = await models.NFT.findByPk(id);
      if (!updated) {
        throw new Error("NFT ID invalid");
      }
      if (updated.isMinted) {
        throw new Error("NFT Already minted");
      }
      updated.isMinted = true;
      updated.save();
      return res.json({
        data: "Token minted successfully",
        error: null,
        success: true,
      });
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
};
Below is from the routes folder.
const router = require("express").Router();
const auth = require("../middleware/auth");
const {
  create_nft,
  get_nft,
  get_nft_all,
  mint
} = require("../controller/nft");

router.post("/create", create_nft);
router.get("/metadata/:id", get_nft);
router.get("/metadata", get_nft_all);
router.put("/mint/:id", mint);

module.exports = router;
Looking at your code, you may have some kind of asynchronous issue in this part:
parsed.forEach(async (item) => {
  // return res.json(item)
  let newNft = await models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  });
});
Because .forEach is meant for synchronous callbacks and NFT.create returns a promise (it is asynchronous), things happen out of order: forEach does not wait for the inserts, so res.json runs before the records are created.
One approach is to build the promises first and then perform a batch operation with Promise.all:
const data = parsed.map(item => {
  return models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  })
})

const results = await Promise.all(data)
The main difference here is that Promise.all resolves the N NFT.create promises concurrently. If you are worried that the data set is too big to process fully in parallel, you can limit the concurrency with an async iteration helper such as Bluebird's Promise.map:
const Promise = require('bluebird')

const data = await Promise.map(parsed, item => {
  return models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  })
}, { concurrency: 10 }) // limit how many create calls run at once

return data
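If adding Bluebird is not desirable, another option (a sketch, not from the original answer) is a plain for...of loop, which awaits each insert before starting the next; slower than Promise.all, but strictly ordered and dependency-free:
// Sequential alternative using only built-in async/await and the same field mapping.
for (const item of parsed) {
  await models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  });
}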
I'm trying to implement PayPal payments on my website. I can create a payment when the user requests it and send them the redirect approval URL; after the client pays, I call execute on my backend with the payment ID and payer ID, but I receive the error below.
{"response":{"name":"VALIDATION_ERROR","message":"Invalid request - see details","debug_id":"4f3a6da7e0c7d","details":[{"location":"body","issue":"MALFORMED_REQUEST_JSON"}],"links":[],"httpStatusCode":400},"httpStatusCode":400}
I have been trying everything for a few hours; I tried copying the create-payment JSON from everywhere, including the PayPal API examples, and still nothing works.
Also, I want to redirect the client to a success page right after the transaction is approved (executed). Does that have to be handled by the front end?
Code
const paypal = require('paypal-rest-sdk');
const dbService = require('../../services/mongodb-service');
const { ObjectId } = require('mongodb');

const getProducts = async () => {
  const products = await dbService.getCollection('products');
  return await products.find({}).toArray();
}

paypal.configure({
  'mode': 'sandbox',
  'client_id': 'id',
  'client_secret': 'secret'
});

const createPayment = async (productId) => {
  const products = await dbService.getCollection('products');
  const product = await products.findOne({ '_id': ObjectId(productId) })
  if (!product) return Promise.reject('Product not found');
  const payment = {
    "intent": "sale",
    "payer": {
      "payment_method": "paypal"
    },
    "transactions": [{
      "amount": {
        "currency": "USD",
        "total": product.price,
      },
      "description": product.description,
      "payment_options": {
        "allowed_payment_method": "IMMEDIATE_PAY"
      },
      "item_list": {
        "items": [{
          "name": product.name,
          "description": product.description,
          "quantity": 1,
          "price": product.price,
          "tax": 0,
          "sku": product._id,
          "currency": "USD"
        }]
      }
    }],
    "redirect_urls": {
      "return_url": "http://localhost:3000/purchase-success",
      "cancel_url": "http://localhost:3000/purchase-error"
    }
  }
  const transaction = await _createPay(payment);
  const redirect = transaction.links.find(link => link.method === 'REDIRECT');
  return redirect;
}

const _createPay = (payment) => {
  return new Promise((resolve, reject) => {
    paypal.payment.create(payment, (err, payment) => err ? reject(err) : resolve(payment));
  });
}

const executePayment = async (paymentId, payerId) => {
  try {
    const execute = await _executePay(paymentId, payerId);
    console.log(execute);
    return execute;
  } catch (err) { console.log(JSON.stringify(err)) }
}

const _executePay = (paymentId, payerId) => {
  return new Promise((resolve, reject) => {
    console.log(paymentId, payerId);
    paypal.payment.execute(paymentId, payerId, (error, payment) => {
      return error ? reject(error) : resolve(JSON.stringify(payment));
    })
  })
}

module.exports = {
  createPayment,
  executePayment,
  getProducts
}
It should be:
const _executePay = (paymentId, payerId) => {
  return new Promise((resolve, reject) => {
    console.log(paymentId, payerId);
    var payerIdObj = { payer_id: payerId };
    paypal.payment.execute(paymentId, payerIdObj, (error, payment) => {
      return error ? reject(error) : resolve(JSON.stringify(payment));
    })
  })
}
the doc
I was able to resolve it by making the POST request directly; code:
const _executePay = async (paymentId, payerId) => {
  const response = await axios.post(
    `https://api.sandbox.paypal.com/v1/payments/payment/${paymentId}/execute`,
    { 'payer_id': payerId },
    {
      auth: {
        username: CLIENT_ID,
        password: CLIENT_SECRET
      }
    }
  )
  return response;
}
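On the follow-up question about redirecting to a success page: with this flow, PayPal sends the buyer back to the configured return_url with paymentId and PayerID appended as query parameters, so whichever route serves that URL can trigger the execute call and then redirect. A minimal sketch, assuming a hypothetical Express route and page paths (not from the original post):
// Hypothetical Express route handling PayPal's redirect back to return_url.
app.get('/purchase-success', async (req, res) => {
  const { paymentId, PayerID } = req.query; // appended by PayPal after approval
  try {
    await executePayment(paymentId, PayerID);
    res.redirect('/thank-you'); // hypothetical success page
  } catch (err) {
    res.redirect('/purchase-error');
  }
});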
I am trying to convert an XML file into JSON using xml2js on Node.js.
When I hit an attribute, it gives me '_' and '$' keys in the output instead.
I am fully aware that JSON does not have the concept of attributes that XML does.
How do I convert the following XML document:
<id>
  <name language="en">Bob</name>
  <name>Alice</name>
</id>
Into a JSON format something like:
{
  "id": {
    "name": [{
      "language": "en",
      "text": "bob"
    }, "alice"]
  }
}
My code in Node.JS is:
const fs = require('fs');
const util = require('util');
const json = require('json');
const xml2js = require('xml2js');

const xml = fs.readFileSync('./test.xml', 'utf-8', (err, data) => {
  if (err) throw err;
});

const jsonStr = xml2js.parseString(xml, function (err, result) {
  if (err) throw err;
  console.log(util.inspect(JSON.parse(JSON.stringify(result)), { depth: null }));
});
The current output is:
{ id: { name: [ { _: 'Bob', '$': { language: 'en' } }, 'Alice' ] } }
The following code will output:
{
id: { name: [ { language: 'en', text: 'Bob' }, { text: 'Alice' } ] }
}
The code:
const fs = require('fs');
const util = require('util');
const xml2js = require('xml2js');

// fs.readFileSync is synchronous: it returns the file contents and throws on error,
// so no callback is needed here.
const xml = fs.readFileSync('./test.xml', 'utf-8');

xml2js.parseString(xml, function (err, result) {
  if (err) throw err;
  const nameArray = result.id.name;
  const newNameArray = nameArray.map(nameValue => {
    let text = '';
    let attributes = {};
    if (typeof nameValue === 'string') {
      text = nameValue
    } else if (typeof nameValue === 'object') {
      text = nameValue['_']
      attributes = nameValue['$']
    }
    return {
      ...attributes,
      text
    }
  })
  const newResult = {
    id: {
      name: newNameArray
    }
  }
  console.log(util.inspect(JSON.parse(JSON.stringify(newResult)), { depth: null }));
});
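As a side note (not part of the original answers), xml2js also exposes parser options such as mergeAttrs, charkey, attrkey, and explicitArray that can reduce this kind of post-processing; the exact output shape depends on how they are combined. A small sketch:
const fs = require('fs');
const xml2js = require('xml2js');

const xml = fs.readFileSync('./test.xml', 'utf-8');

// mergeAttrs folds attributes into the element object instead of nesting them under '$',
// and charkey renames the text key that otherwise appears as '_'.
xml2js.parseString(xml, { mergeAttrs: true, charkey: 'text' }, (err, result) => {
  if (err) throw err;
  console.log(JSON.stringify(result, null, 2));
});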
Something like this, using camaro:
const xml = `
<id>
    <name language="en">Bob</name>
    <name>Alice</name>
</id>`

const { transform } = require('camaro')

const template = {
  id: {
    name: ['id/name', {
      lang: '@language',
      text: '.'
    }]
  }
}

;(async function () {
  console.log(JSON.stringify(await transform(xml, template), null, 4))
})()
output
{
    "id": {
        "name": [
            {
                "lang": "en",
                "text": "Bob"
            },
            {
                "lang": "",
                "text": "Alice"
            }
        ]
    }
}
I am starting the ginit module but I am getting an error like this:
=> octokit.authenticate() is deprecated. Use "auth" constructor option instead.
How can I fix it?
My code:
module.exports = {
  getInstance: () => {
    return octokit;
  },
  setGithubCredentials: async () => {
    const credentials = await inquirer.askGithubCredentials();
    octokit.authenticate(
      _.extend(
        {
          type: 'basic',
        },
        credentials
      )
    );
  },
}
Maybe you are coding from this article: https://www.sitepoint.com/javascript-command-line-interface-cli-node-js/
My solution is below:
const Octokit = require("@octokit/rest");
const Configstore = require("configstore");
const pkg = require("../package.json");
const _ = require("lodash");
const CLI = require("clui");
const Spinner = CLI.Spinner;
const chalk = require("chalk");
const inquirer = require("./inquirer");

const conf = new Configstore(pkg.name);

module.exports = {
  getInstance: () => {
    return global.octokit;
  },
  getStoredGithubToken: () => {
    return conf.get("github.token");
  },
  setGithubCredentials: async () => {
    const credentials = await inquirer.askGithubCredentials();
    const result = _.extend(
      {
        type: "basic"
      },
      credentials
    );
    global.octokit = Octokit({
      auth: result
    });
  },
  registerNewToken: async () => {
    const status = new Spinner("Authenticating you, please wait...");
    status.start();
    try {
      const response = await global.octokit.oauthAuthorizations.createAuthorization({
        scopes: ["user", "public_repo", "repo", "repo:status"],
        note: "ginits, the command-line tool for initalizing Git repos"
      });
      const token = response.data.token;
      if (token) {
        conf.set("github.token", token);
        return token;
      } else {
        throw new Error(
          "Missing Token",
          "GitHub token was not found in the response"
        );
      }
    } catch (err) {
      throw err;
    } finally {
      status.stop();
    }
  }
};
Try something like:
const Octokit = require('@octokit/rest');

module.exports = {
  getInstance({ username, password }) {
    return Octokit({
      auth: {
        username,
        password,
      },
    });
  },
};
The PR introducing the auth property shows some other examples of specifying credentials.
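For reference (not part of the original answers): recent versions of @octokit/rest export the class as a named export and take credentials directly via the constructor's auth option, for example a personal access token:
const { Octokit } = require("@octokit/rest");

// In newer releases, auth can simply be a personal access token string.
const octokit = new Octokit({ auth: "personal-access-token" });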