Handle XML attributes when converting XML to JSON using Node.js xml2js

I am trying to convert an XML file into JSON using xml2js on Node.js.
When the parser hits an attribute, it puts the element text under a '_' key and the attributes under a '$' key.
I am fully aware that JSON does not have the concept of attributes that XML does.
How do I convert the following XML document:
<id>
<name language="en">Bob</name>
<name>Alice</name>
</id>
Into a JSON format something like:
{
"id": {
"name": [{
"language": "en",
"text": "bob"
}, "alice"]
}
}
My code in Node.js is:
const fs = require('fs');
const util = require('util');
const xml2js = require('xml2js');

// readFileSync is synchronous, so it simply returns the file contents
const xml = fs.readFileSync('./test.xml', 'utf-8');

xml2js.parseString(xml, function (err, result) {
  if (err) throw err;
  console.log(util.inspect(result, { depth: null }));
});
The current output is:
{ id: { name: [ { _: 'Bob', '$': { language: 'en' } }, 'Alice' ] } }

The code below will output:
{
id: { name: [ { language: 'en', text: 'Bob' }, { text: 'Alice' } ] }
}
The code:
const fs = require('fs');
const util = require('util');
const xml2js = require('xml2js');

const xml = fs.readFileSync('./test.xml', 'utf-8');

xml2js.parseString(xml, function (err, result) {
  if (err) throw err;
  const nameArray = result.id.name;
  const newNameArray = nameArray.map(nameValue => {
    let text = '';
    let attributes = {};
    if (typeof nameValue === 'string') {
      // elements without attributes come through as plain strings
      text = nameValue;
    } else if (typeof nameValue === 'object') {
      // elements with attributes use '_' for the text and '$' for the attributes
      text = nameValue['_'];
      attributes = nameValue['$'];
    }
    return {
      ...attributes,
      text
    };
  });
  const newResult = {
    id: {
      name: newNameArray
    }
  };
  console.log(util.inspect(newResult, { depth: null }));
});
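For reference (not part of the original answers), xml2js also exposes parser options that can reduce the post-processing. A minimal sketch, assuming the same test.xml as above: mergeAttrs lifts attributes up beside the element text (which stays under the default '_' key), and explicitArray: false only creates arrays for elements that actually repeat. The exact output shape can vary slightly between xml2js versions.
const fs = require('fs');
const xml2js = require('xml2js');

const xml = fs.readFileSync('./test.xml', 'utf-8');

// mergeAttrs: merge attributes into the parent object instead of nesting them under '$'
// explicitArray: false: wrap children in arrays only when an element repeats
const parser = new xml2js.Parser({ mergeAttrs: true, explicitArray: false });

parser.parseString(xml, (err, result) => {
  if (err) throw err;
  // roughly: { id: { name: [ { _: 'Bob', language: 'en' }, 'Alice' ] } }
  console.log(JSON.stringify(result, null, 2));
});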

Something like this, using the camaro library:
const xml = `
<id>
<name language="en">Bob</name>
<name>Alice</name>
</id>`
const { transform } = require('camaro')
const template = {
id: {
name: ['id/name', {
lang: '#language',
text: '.'
}]
}
}
;(async function () {
console.log(JSON.stringify(await transform(xml, template), null, 4))
})()
Output:
{
"id": {
"name": [
{
"lang": "en",
"text": "Bob"
},
{
"lang": "",
"text": "Alice"
}
]
}
}

Related

Trouble with return statement in Node Async Await

Trying to write a query for DynamoDB and learn promises etc. The console.log(resp.Items) returns the object that I am looking for, so I think my query is formatted correctly, yet I get a status 200 back with an empty object.
I have read up for a few days and tried to implement various changes to the code, but nothing is returning the object resp.Items. I am guessing the function is returning before the const is updated with the data, but I am not sure why the console.log works.
const AWS = require('aws-sdk')
const dynamodb = new AWS.DynamoDB()
const getTechs = async () => {
try {
const resp = await dynamodb
.query({
ExpressionAttributeValues: {
':tech': { S: 'TECH#' },
},
KeyConditionExpression: 'PK = :tech',
TableName: process.env.TABLE_NAME,
ScanIndexForward: true,
})
.promise()
console.log(resp.Items)
if (!resp.Items) {
return {
error: 'No Techs in the DB',
}
}
return {
tech: resp.Items,
}
} catch (error) {
console.log('Error retrieving Tech List')
console.log(error)
return {
error: 'Could not retrieve Tech List',
}
}
}
Handler function:
const { makeHandler } = require('./utils')
const { getTechs } = require('../data')
// const { Tech } = require('../entities')
const inputSchema = {
type: 'object',
properties: {
pathParameters: {
type: 'object',
properties: {
tech: { type: 'string' },
},
required: ['tech'],
},
},
required: ['pathParameters'],
}
const handler = async (event) => {
const { techs, error } = await getTechs()
const statusCode = error ? 500 : 200
const body = error ? JSON.stringify({ error }) : JSON.stringify({ techs })
return {
statusCode,
body,
}
}
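One aside from reading the code above (an observation, not a confirmed diagnosis): getTechs resolves with a property named tech, while the handler destructures techs, so techs is always undefined even when the query succeeds and the response body serialises to an empty object. A minimal sketch of aligning the two names in the handler:
// getTechs returns { tech: resp.Items } (see above), so read the same property here
const { tech, error } = await getTechs()
const statusCode = error ? 500 : 200
const body = error ? JSON.stringify({ error }) : JSON.stringify({ techs: tech })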
module.exports.handler = makeHandler({ handler })
executeTransactWrite function:
const executeTransactWrite = async ({ tech, params }) => {
const transactionRequest = tech.transactWriteItems(params)
let cancellationReasons
transactionRequest.on('extractError', (response) => {
try {
cancellationReasons = JSON.parse(
response.httpResponse.body.toString()
).CancellationReasons
} catch (err) {
// suppress this just in case some types of errors aren't JSON parseable
console.error('Error extracting cancellation error', err)
}
})
return new Promise((resolve, reject) => {
transactionRequest.send((err, response) => {
if (err) {
err.cancellationReasons = cancellationReasons
return reject(err)
}
return resolve(response)
})
})
}
module.exports = {
  executeTransactWrite,
}
makeHandler function:
const middy = require('middy')
const {
jsonBodyParser,
validator,
httpErrorHandler,
} = require('middy/middlewares')
const makeHandler = ({ handler, inputSchema }) =>
middy(handler)
.use(jsonBodyParser())
.use(validator({ inputSchema }))
.use(httpErrorHandler())
module.exports = { makeHandler }

Node.js: Connection between tables within a JSON structure

I have a Question object and a Tags object
Tags contains the topics that match the question
Each question can have several tags
I want to connect them in a one-to-many relationship but I don't know how.
My code:
Question.Json:
[
{
"id": "0f16b716-450c-4468-8096-74e6e4d4c16f",
"lastUpdate": "13/01/2023 09:13:15",
"title": "aaa",
"typeId": 1,
"textBelow": "aaa",
"tags": [
{
"id": "57f2aa4c-5ad2-438a-a1b4-9c5c018d66b4",
"name": "math"
}
]
}
]
Tags.Json:
[
{
"id": "57f2aa4c-5ad2-438a-a1b4-9c5c018d66b4",
"name": "math"
}
]
My repository:
const { readFile, updateItem, removeItem, insertItem } = require('../fs/fs');
const { v4 } = require('uuid');
class DBQuestionsRepository {
async getAllQuestions() {
const data = await readFile('./data/jsonAsDb.json');
const types = await readFile('./data/questionType.json');
data.forEach((data) => {
data.type = types.find((type) => type.id === Number(data.typeId));
delete data.typeId;
});
return data;
}
async addQuestion(body) {
const currentDate = new Date();
const formattedDate = `${currentDate.getDate().toString().padStart(2, '0')}/${(currentDate.getMonth() + 1).toString().padStart(2, '0')}/${currentDate.getFullYear()} ${currentDate.getHours().toString().padStart(2, '0')}:${currentDate.getMinutes().toString().padStart(2, '0')}:${currentDate.getSeconds().toString().padStart(2, '0')}`;
const item = await insertItem('./data/jsonAsDb.json', { id: v4(), lastUpdate: formattedDate, ...body });
return item;
}
async getQuestionById(id) {
const data = await readFile('./data/jsonAsDb.json');
const item = data.find(i => i.id === id);
return item;
}
}
I tried to connect the tables
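One way to make the one-to-many connection (a sketch only, following the same pattern the repository already uses for typeId, and assuming questions store their tag ids in a tagIds array and that Tags.Json is saved at a hypothetical ./data/tags.json path) is to resolve the tag ids against the tags file whenever questions are read:
async getAllQuestionsWithTags() {
  const questions = await readFile('./data/jsonAsDb.json');
  const tags = await readFile('./data/tags.json'); // hypothetical location of Tags.Json
  questions.forEach((question) => {
    // resolve each stored tag id into its full tag object (one question -> many tags)
    question.tags = (question.tagIds || []).map((tagId) => tags.find((tag) => tag.id === tagId));
    delete question.tagIds;
  });
  return questions;
}
This keeps the tag data in one place, so renaming a tag only requires touching Tags.Json.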

API Only sends 1 chunk of metadata when called

I have a problem with my API that sends metadata when called from my smart contract or website. It is for NFT tokens; my database is Postgres and the API is Node.js.
The problem is that when I mint 1 NFT the metadata works perfectly, but if I mint 2 or more it will only ever send 1 chunk of data, so only 1 NFT mints properly and the rest have no data.
Do I need to set a loop function or delay? Does anyone have any experience with this?
Any help would be much appreciated.
Below is the code from the "controller" folder labeled "nft.js"
const models = require("../../models/index");
const path = require("path");
const fs = require("fs");
module.exports = {
create_nft: async (req, res, next) => {
try {
const dir = path.resolve(__dirname + `../../../data/traitsfinal.json`);
const readCards = fs.readFileSync(dir, "utf8");
const parsed = JSON.parse(readCards);
console.log("ya data ha final ??", parsed);
parsed.forEach(async (item) => {
// return res.json(item)
let newNft = await models.NFT.create({
name: item.Name,
description: item.Description,
background: item.Background,
body: item.Body,
mouth: item.Mouth,
eyes: item.Eyes,
head_gear: item.Head_Gear,
tokenId: item.tokenId,
image: item.imagesIPFS,
});
});
return res.json({
data: "nft created",
error: null,
success: true,
});
} catch (error) {
console.log("server error", error.message);
next(error);
}
},
get_nft: async (req, res, next) => {
try {
const { id } = req.params;
// console.log("id ?????????",id)
// console.log("type of ",typeof(id))
// const n=Number(id)
// console.log("type of ",typeof(id))
const nft = await models.NFT.findByPk(id);
if (!nft) {
throw new Error("Token ID invalid");
}
if (!nft.isMinted) {
throw new Error("Token not minted");
}
console.log(nft);
// }
const resObj = {
name: nft.name,
description: nft.description,
image: `https://gateway.pinata.cloud/ipfs/${nft.image}`,
attributes: [
{ trait_type: "background", value: `${nft.background}` },
{ trait_type: "body", value: `${nft.body}` },
{ trait_type: "mouth", value: `${nft.mouth}` },
{ trait_type: "eyes", value: `${nft.eyes}` },
{ trait_type: "tokenId", value: `${nft.tokenId}` },
{
display_type: "number",
trait_type: "Serial No.",
value: id,
max_value: 1000,
},
],
};
return res.json(resObj);
} catch (error) {
console.log("server error", error.message);
next(error);
}
},
get_nft_all: async (req, res, next) => {
try {
// console.log("id ?????????",id)
// console.log("type of ",typeof(id))
// const n=Number(id)
// console.log("type of ",typeof(id))
const nft = await models.NFT.findAndCountAll({
limit: 10
});
// console.log(nft);
if (!nft) {
throw new Error("Token ID invalid");
}
// if (nft.isMinted) {
// throw new Error("Token not minted");
// }
// console.log(nft);
// }
var resObjarr = [];
for (var i = 0; i < nft.rows.length; i++) {
resObj = {
name: nft.rows[i].name,
description: nft.rows[i].description,
image: `https://gateway.pinata.cloud/ipfs/${nft.rows[i].image}`,
attributes: [
{ trait_type: "background", value: `${nft.rows[i].background}` },
{ trait_type: "body", value: `${nft.rows[i].body}` },
{ trait_type: "mouth", value: `${nft.rows[i].mouth}` },
{ trait_type: "eyes", value: `${nft.rows[i].eyes}` },
{ trait_type: "tokenId", value: `${nft.rows[i].tokenId}` },
{
display_type: "number",
trait_type: "Serial No.",
value: nft.rows[i].id,
max_value: 1000,
},
],
};
resObjarr.push(resObj);
}
console.log(JSON.stringify(resObjarr))
return res.json(resObjarr);
} catch (error) {
console.log("server error", error.message);
next(error);
}
},
mint: async (req, res, next) => {
try {
const { id } = req.params;
const updated = await models.NFT.findByPk(id);
if (!updated) {
throw new Error("NFT ID invalid");
}
if (updated.isMinted) {
throw new Error("NFT Already minted");
}
updated.isMinted = true;
await updated.save();
return res.json({
data: "Token minted successfully",
error: null,
success: true,
});
} catch (error) {
console.log("server error", error.message);
next(error);
}
},
};
Below is from the routes folder.
const router = require("express").Router();
const auth=require("../middleware/auth")
const {
create_nft,
get_nft,
get_nft_all,
mint
} = require("../controller/nft");
router.post(
"/create",
create_nft
);
router.get(
"/metadata/:id",
get_nft
);
router.get(
"/metadata",
get_nft_all
);
router.put(
"/mint/:id",
mint
);
module.exports = router;
Looking at your code, you may have some kind of asynchronous issue in this part:
parsed.forEach(async (item) => {
// return res.json(item)
let newNft = await models.NFT.create({
name: item.Name,
description: item.Description,
background: item.Background,
body: item.Body,
mouth: item.Mouth,
eyes: item.Eyes,
head_gear: item.Head_Gear,
tokenId: item.tokenId,
image: item.imagesIPFS,
});
});
Because .forEach is meant to be used in a synchronous context and NFT.create returns a promise (i.e. it is asynchronous), things happen out of order.
So one approach is to build the promises first and then perform a batch operation using Promise.all.
const data = parsed.map(item => {
return models.NFT.create({
name: item.Name,
description: item.Description,
background: item.Background,
body: item.Body,
mouth: item.Mouth,
eyes: item.Eyes,
head_gear: item.Head_Gear,
tokenId: item.tokenId,
image: item.imagesIPFS,
})
})
const results = await Promise.all(data)
The main difference here is that Promise.all resolves the N NFT.create promises in parallel. If you are worried about the amount of concurrent work, because the data may be too big to process all at once, you can use the concurrency-limited iteration provided by Bluebird's Promise.map instead.
const Promise = require('bluebird')
const data = await Promise.map(parsed, item => {
return models.NFT.create({
name: item.Name,
description: item.Description,
background: item.Background,
body: item.Body,
mouth: item.Mouth,
eyes: item.Eyes,
head_gear: item.Head_Gear,
tokenId: item.tokenId,
image: item.imagesIPFS,
})
}, { concurrency: 10 }) // limit parallel NFT.create calls; 10 is an assumed value, tune as needed
return data
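If pulling in Bluebird is not desirable, another option (a sketch, not from the original answer) is a plain for...of loop inside create_nft, which awaits each create before starting the next and therefore caps concurrency at one:
const created = []
for (const item of parsed) {
  // each NFT.create call finishes before the next one starts
  created.push(await models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  }))
}
return created
This is slower than Promise.all for large files, but it never overwhelms the database with parallel inserts.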

AWS @aws-sdk/lib-dynamodb 'Cannot read property '0' of undefined'

I am trying to leverage @aws-sdk/lib-dynamodb to simplify data retrieval from AWS DynamoDB.
// aws.js
const { DynamoDBClient, ScanCommand } = require('@aws-sdk/client-dynamodb');
const { DynamoDBDocumentClient } = require('@aws-sdk/lib-dynamodb');
const { fromIni } = require('@aws-sdk/credential-provider-ini');
const client = new DynamoDBClient({
credentials: fromIni({ profile: process.env.AWS_PROFILE }),
region: process.env.AWS_DEFAULT_REGION,
});
const ddb_client = DynamoDBDocumentClient.from(client);
const listItemsDDB = async (params) => {
try {
const command = new ScanCommand(params);
const data = await ddb_client.send(command);
return data;
} catch (err) {
throw Error(err);
}
};
module.exports = {listItemsDDB };
I import the functions into my rest_api.js.
Everything is based on an express js application.
...trimmed...
app.get('/courses/:year/:location/:month', async (req, res, next) => {
console.log(req.params);
const payload = {
TableName: ddb_calendar_name,
FilterExpression: '#l = :loc and #m = :month and #y = :year',
ExpressionAttributeValues: {
':loc': req.params.location,
':month': req.params.month,
':year': req.params.year,
},
ExpressionAttributeNames: {
'#l': 'Location',
'#m': 'Month',
'#y': 'Year',
},
};
try {
const result = await aws.listItemsDDB(payload);
return res.status(200).send(result.Items[0]);
} catch (error) {
next(error);
}
});
My data looks like this:
{
"Id": {
"S": "02322f8f-05a7-4e27-aaf9-08129f3128ef"
},
"Location": {
"S": "schildergasse"
},
"Month": {
"S": "august"
},
"Year": {
"S": "2021"
}
}
I use a rest client to test the endpoints.
After firing GET http://localhost:4000/courses/2021/schildergasse/august I receive:
Error: TypeError: Cannot read property '0' of undefined
at Object.listItemsDDB (/Users/olivergoetz/fifi-calendar/server/aws.js:39:9)
at processTicksAndRejections (node:internal/process/task_queues:96:5)
at async /Users/olivergoetz/fifi-calendar/server/rest_api.js:221:18
What am I missing here?
I imported the ScanCommand from the wrong library:
I had to change this:
const { DynamoDBClient, ScanCommand } = require('@aws-sdk/client-dynamodb');
const { DynamoDBDocumentClient } = require('@aws-sdk/lib-dynamodb');
to this:
const { DynamoDBClient } = require('@aws-sdk/client-dynamodb');
const { DynamoDBDocumentClient, ScanCommand } = require('@aws-sdk/lib-dynamodb');
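As background on why the import matters (a general note about these packages, not part of the original answer): the document client layer in @aws-sdk/lib-dynamodb unmarshalls DynamoDB attribute values into plain JavaScript values, so the same item comes back in two different shapes depending on which ScanCommand is used:
// With the low-level ScanCommand from @aws-sdk/client-dynamodb:
// { Id: { S: "02322f8f-05a7-4e27-aaf9-08129f3128ef" }, Location: { S: "schildergasse" }, ... }

// With the ScanCommand from @aws-sdk/lib-dynamodb sent through DynamoDBDocumentClient:
// { Id: "02322f8f-05a7-4e27-aaf9-08129f3128ef", Location: "schildergasse", Month: "august", Year: "2021" }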

Fetching data with axios

I am fetching data from an API and its response is something like this:
{
"id": "BTC",
"currency": "BTC",
"symbol": "BTC",
"name": "Bitcoin",
"1d": {
"volume": "47854345429.71",
"volume_change": "42758948175.92",
},
"7d": {
"volume": "308524476736.00",
"volume_change": "42758948175.92",
},
Here, I am using inquirer to get a specific option, and I am trying to fetch information for a specific interval, i.e. (1d, 7d, 30d, ...), from the API.
async volumechng(cmd) {
try {
keyManager = new KeyManager();
const key = keyManager.getKey();
const api = new CryptoAPI(key)
const opts = await inquirer.prompt([
{
type: 'rawlist',
name: 'volumechng',
message: 'Volume change for interval ?',
choices: [
'1d',
'7d',
],
}
])
const volumeChange = await api.getVolumeChange(cmd,opts.volumechng)
} catch (err) {
console.log(err)
}
}
async getVolumeChange(coinOption,interval){
try{
const res = await axios.get(
`${this.baseUrl}?key=${this.apiKey}&ids=${coinOption}`
)
let output = ''
// console.log(interval) --> getting an output
res.data.forEach(coin => {
output += `
${coin.interval.volume_change}
`
})
return output
}
catch (err) {
handleAPIError(err);
}
}
I am getting the value of interval from console.log(), but I am not getting the output.
Let's check:
console.log('Data response', res.data)
console.log("Interval: ", interval)
If you provide more of their output, I can help you more.
Please also log to the console:
res.data.forEach(coin => {
console.log("Coin ", coin)
output += `
${coin.interval.volume_change}
`
})
to check what your output is.
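If the logged coin objects look right but output stays empty, one likely cause (an assumption, since only part of the API response is shown) is that coin.interval looks for a property literally named "interval" instead of using the value held by the interval variable. Bracket notation reads the dynamic key:
res.data.forEach(coin => {
  // coin[interval] uses the value of the interval variable ('1d', '7d', ...) as the property name
  output += `
${coin[interval].volume_change}
`
})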
