I have implemented VirtualizedTable with multi-column sorting. What I have found is that I am unable to sort on other columns after rendering.
The following is my sort function in the class:
sort = ({ event, sortBy, sortDirection }) => {
const nosort = ['input', 'textarea', 'select', 'option', 'span'].indexOf(event.target.tagName.toLowerCase()) !== -1
if (!nosort && this.props.disableSort !== true) {
this.setState((prevState, props) => ({ sortBy, sortDirection: sortBy === prevState.sortBy ? sortDirection : 'ASC' }))
}
}
As per the documentation, this is my sortState:
sortState = () => createTableMultiSort(this.sort, defaultSort);
The following is what I am injecting into the component as props:
const defaultSort = () => {
return {
defaultSortBy: ['firstColumn', 'secondColumn'],
defaultSortDirection: {
firstColumn: 'ASC',
secondColumn: 'ASC',
}
}
}
This is my headerRenderer:
headerRenderer = (tableWidth) => ({ columnData, dataKey, disableSort, label, sortBy, sortDirection }) => {
const showSortIndicator = this.sortState().sortBy.includes(dataKey);
return (
<React.Fragment key={dataKey}>
<div className="ReactVirtualized__Table__headerTruncatedText" title={label}>
{label}
</div>
{showSortIndicator && <SortIndicator key="SortIndicator" sortDirection={this.sortState().sortDirection[dataKey]} />}
<Draggable
axis="x"
defaultClassName="DragHandle"
defaultClassNameDragging="DragHandleActive"
onDrag={(event, { deltaX }) => { this.resizeRow({ dataKey, deltaX, tableWidth }) } }
position={{ x: 0 }}
zIndex={999}
>
<span className="DragHandleIcon" title="Drag icon to expand/collapse the column">⋮</span>
</Draggable>
</React.Fragment>
)
}
I have read through the createMultiSort source code and debugged my own code to see why it was not working.
Basically, showSortIndicator is never true, because sortBy does not contain the column key.
I have tried passing the column names into defaultSort directly, but that does not seem to work.
What can I do to enable sorting on other columns after rendering? How would those columns be sorted if default sorting has been specified?
TIA
Patrick
Worked this out after some time debugging and looking over the source code. It seems you can't specify defaultSortBy separately as a prop to the element; you need to specify it when you create the sort state. Hence, as per the docs:
const sortState = createMultiSort(sort, {
defaultSortBy: ['firstName', 'lastName'],
defaultSortDirection: {
firstName: 'ASC',
lastName: 'ASC',
},
});
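For reference, here is a minimal sketch of the full wiring under that constraint (the column keys are placeholders): the sort state is created once, with its defaults, and both the Table and the headerRenderer read from that single instance:
const sortState = createMultiSort(this.sort, {
  defaultSortBy: ['firstColumn', 'secondColumn'],
  defaultSortDirection: {
    firstColumn: 'ASC',
    secondColumn: 'ASC',
  },
});
// Pass the returned sort function to the Table:
// <Table sort={sortState.sort} ... />
// And in the headerRenderer, read from the same instance:
// const showSortIndicator = sortState.sortBy.includes(dataKey);
// const direction = sortState.sortDirection[dataKey];
Note that creating the sort state inside a function, as in the sortState = () => createTableMultiSort(...) above, builds a fresh instance on every call, which keeps resetting sortBy to its defaults.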
I have an array of objects with the same setup as the one below. I want to insert this array of objects into a Postgres table that looks like this: [![Table setup][1]][1]
I have tried the function below, but it returns an error when inserting UNGDOMSSKOLE: because the value is interpolated as a bare, unquoted string, Postgres chokes on the space and the query crashes on the second input value. How can I make it understand that it is a string?
{
'#type': 'SensorSystem',
id: 'SN47230',
name: 'ÅKRA UNGDOMSSKOLE',
shortName: 'Åkra ',
country: 'Norge',
countryCode: 'NO',
geometry: {
'#type': 'Point',
coordinates: [ 5.1963, 59.2555 ],
nearest: false
},
masl: 18,
validFrom: '2013-10-29T00:00:00.000Z',
county: 'ROGALAND',
countyId: 11,
municipality: 'KARMØY',
municipalityId: 1149,
stationHolders: [ 'KARMØY KOMMUNE' ],
externalIds: [ '506131077' ],
wigosId: '0-578-0-47230'
}
Error message:
error: syntax error at or near "UNGDOMSSKOLE"
What I have tried so far:
let sqlinsert= data.data.map((source)=>{
if (source.geometry) {
if(!source.masl){
source.masl=0
}
let Point = `POINT(${source.geometry.coordinates[0]} ${source.geometry.coordinates[1]})`;
return `(${source.id}, ${source.name}, ${source.shortName},${source.country},${source.countryCode},${source.masl},${source.geometry.coordinates[0]},${source.geometry.coordinates[1]},${Point},${source.validFrom},${source.county},${source.countyId},${source.municipality},${source.municipalityId})`
}
})
const result = await db.query("INSERT INTO sources(source_id,name,shortName,country,countryCode,masl,long,lat,geog,valid_from,county,countyId,municipality,municipalityId) values"+sqlinsert[0])
A second problem I have with this is that inserting
POINT(59.2555 5.1963)
Gives a syntax error at 5.1963
[1]: https://i.stack.imgur.com/4RSkq.png
The main problem with your query as written is that you are interpolating raw, unescaped values into your VALUES records. You can use escapeLiteral on your db client to ensure that these values are properly escaped, which will solve the syntax errors you are getting:
const data = [
{
"#type": "SensorSystem",
id: "SN47230",
name: "ÅKRA UNGDOMSSKOLE",
shortName: "Åkra ",
country: "Norge",
countryCode: "NO",
geometry: {
"#type": "Point",
coordinates: [5.1963, 59.2555],
nearest: false,
},
masl: 18,
validFrom: "2013-10-29T00:00:00.000Z",
county: "ROGALAND",
countyId: 11,
municipality: "KARMØY",
municipalityId: 1149,
stationHolders: ["KARMØY KOMMUNE"],
externalIds: ["506131077"],
wigosId: "0-578-0-47230",
},
].map((source) => {
const {
id,
name,
shortName,
country,
countryCode,
masl,
geometry: {
// the coordinates in your source data appear to be in y,x instead of
// x,y. Treating them as x,y results in the point being located
// in the Indian Ocean while y,x is somewhere in Norway.
coordinates: [lat, long],
},
validFrom,
county,
countyId,
municipality,
municipalityId,
} = source;
return [
id,
name,
shortName,
country,
countryCode,
masl || 0,
long,
lat,
`POINT( ${long} ${lat} )`,
validFrom,
county,
countyId,
municipality,
municipalityId,
];
});
const headers = [
"source_id",
"name",
"shortname",
"country",
"countrycode",
"masl",
"long",
"lat",
"geog",
"valid_from",
"county",
"countyid",
"municipality",
"municipalityid",
];
const sourceValStr = data
.map((sourceRecords, rowIndex) => {
return sourceRecords
.map((value, colIndex) => {
if (typeof value === "string") {
// safely escape string values
return dbClient.escapeLiteral(value);
}
if (
typeof value === "number" ||
typeof value === "boolean" ||
typeof value === "bigint"
) {
return value;
}
if (value === undefined || value === null) {
return "null";
}
throw new Error(
`non-simple value: ${JSON.stringify(value)} for ${
headers[colIndex]
} at row ${rowIndex}`
);
})
.join(",");
})
.map((value) => `(${value})`)
.join(",");
const sourceInsert = `INSERT INTO sources(${headers.join(
","
)}) VALUES ${sourceValStr};`;
await dbClient.query(sourceInsert);
A much more efficient and scalable way to insert the rows is to use the pg-copy-streams library in conjunction with a CSV library like csv-stringify which will bulk insert using a COPY FROM stream:
import { from as copyFrom } from "pg-copy-streams";
import { stringify } from "csv-stringify";
// ...
const copyStmt = `COPY sources(${headers.join(
","
)}) FROM STDIN (FORMAT CSV)`;
await new Promise<void>((resolve, reject) => {
const copyStream = dbClient.query(copyFrom(copyStmt));
const stringifiedStream = stringify(data, {
header: false,
encoding: "utf-8",
delimiter: ",",
quote: "\"",
});
copyStream
.on("error", (err) => {
reject(err);
})
// resolve only once Postgres has consumed all of the rows
.on("finish", () => resolve());
stringifiedStream.on("error", (err) => reject(err));
stringifiedStream.pipe(copyStream);
});
On my low-end laptop, this approach takes about 39 seconds to insert a million rows with no database optimizations.
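For completeness, here is a minimal sketch of the client setup these snippets assume (node-postgres; the connection details are placeholders read from the standard PG* environment variables, and headers and data are the arrays built above):
import { Client } from "pg";
import { from as copyFrom } from "pg-copy-streams";
import { stringify } from "csv-stringify";

const dbClient = new Client(); // connection config from PG* env vars
await dbClient.connect();
try {
  const copyStmt = `COPY sources(${headers.join(",")}) FROM STDIN (FORMAT CSV)`;
  await new Promise((resolve, reject) => {
    const copyStream = dbClient.query(copyFrom(copyStmt));
    // resolve only once Postgres has consumed all of the rows
    copyStream.on("error", reject).on("finish", resolve);
    stringify(data, { header: false }).on("error", reject).pipe(copyStream);
  });
} finally {
  await dbClient.end();
}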
I am fetching details from the database by ID and want to display them in a table, but for now I just want to display them in the browser without the table and styling. I am getting values.map is not a function, but when I do console.log(values), I get {title: "", description: "", tags: "", photo: "", loading: false, …}
createdBlog: ""
description: ""
error: ""
formData: ""
getaRedirect: false
loading: false
photo: ""
tags: ""
title: ""
__proto__: Object
What should I do? Please help.
import React, { useState, useEffect } from "react";
import "../../styles.css";
import { getoneBlog } from "../helper/coreapicalls";
import ImageHelper from "../helper/ImageHelper";
const Fullblog = ({ match }) => {
const [values, setValues] = useState({
title: "",
description: "",
tags: "",
photo: "",
loading: false,
error: "",
createdBlog: "",
getaRedirect: false,
formData: "",
});
const {
title,
description,
tags,
loading,
error,
createdBlog,
getaRedirect,
formData,
} = values;
const preload = (blogId) => {
getoneBlog(blogId).then((data) => {
//console.log(data);
if (data.error) {
setValues({ ...values, error: data.error });
} else {
// preloadCategories();
setValues({
...values,
title: data.title,
description: data.description,
tags: data.tags,
formData: new FormData(),
});
}
});
};
console.log(values);
useEffect(() => {
preload(match.params.blogId);
}, []);
return (
<div>
<div className="py-md-5 py-3">
<div className="Fullblog ">
{values.map((fullblog, index) => {
return (
<div>
<h1 className="FullblogTittle">
Founder Leandra Medine Cohen announced the news to her
employees on a Zoom call earlier this week.
{fullblog.title}
</h1>
<p className="tags">tags </p>
<img
src="https://cdn.pixabay.com/photo/2020/10/17/17/41/girl-5662873_960_720.jpg"
className="FullblogImg"
alt="img"
/>
<ImageHelper />
<p className="description">
CULTURE How to Celebrate Halloween at Home This Year From
horror movie marathons to Halloween-themed drive-in features
to virtual pumpkin carving parties, here's how to celebrate
Halloween safely this year. By Pahull Bains Date October 22,
2020 With cases on the rise in certain regions of Ontario ’s A
Little Blurry. The livestream will be viewable on demand for
24 hours for ticket holders. Get your tickets here.
</p>
</div>
);
})}
</div>
</div>
</div>
);
};
export default Fullblog;
coreapicalls code:
// get all Blogs
export const getBlogs = () => {
return fetch(`${API}/blogs`, {
method: "GET",
})
.then((response) => {
return response.json();
})
.catch((err) => console.log(err));
};
//get a Blog
export const getoneBlog = (blogId) => {
return fetch(`${API}blog/${blogId}`, {
method: "GET",
})
.then((response) => {
return response.json();
})
.catch((err) => console.log(err));
};
You can use the .map() function on an Array, but your state is set up as an Object rather than an array.
Check these lines:
{values.map((fullblog, index) => {
&
const [values, setValues] = useState({ ... }); // This is an object
Reference
The map() method creates a new array populated with the results of
calling a provided function on every element in the calling array.
So the issue is that you are trying to call map() on an object; that's why it is showing you the error.
You can only use .map() on Array variables, as mentioned before.
You can simply do this:
Object.keys(values), which will make an Array with the keys of your object:
Object.keys(values).map(key => console.log(values[key]))
You used object destructuring on values but there is no such method as Object.prototype.map...
You can loop over Object.values(values) with map or Object.keys(values) if you want to loop over the keys.
Code:
Object.values(values).map((fullblog, index) => {
// insert code here
});
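Alternatively, since values in this component holds a single blog object rather than a list, a simpler option (a sketch based on the component above) is to drop map() entirely and render the destructured fields directly:
return (
  <div className="py-md-5 py-3">
    <div className="Fullblog">
      <h1 className="FullblogTittle">{title}</h1>
      <p className="tags">{tags}</p>
      <p className="description">{description}</p>
    </div>
  </div>
);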
I have the following endpoint, which returns all the courses found in the DB by default. I want to change it so that it returns all records when req.query.class is empty, and otherwise filters by class.
router.get('/allcourses', checkAuth, (req, res) => {
const studentclass = req.query.class;
Course.find().select({ coursename: 1 }).select({ class: 1 }).select({ board: 1 }).select({ coursedescription: 1 }).then((data) => {
res.status(200).json({
courses: data
})
}).catch((err) => {
console.log(err)
return res.status(503).json({
message: 'Cannot retrieve courses at this moment, try again later!'
})
})
})
I cannot figure out how to use the studentclass variable.
If I do Course.find({class:studentclass}).select({ coursename: 1 }).select({ class: 1 }).select({ board: 1 }).select({ coursedescription: 1 }) it returns empty when req.query.class is empty (of course).
Just define an empty filter object and set the class property depending on the existence of the req.query.class variable:
const filter = {};
if(req.query.class) {
filter.class = req.query.class;
}
Course.find(filter).select(...)
Note that this would not set class in the filter if req.query.class yielded an empty string (i.e. ""), due to JavaScript's truthiness rules for strings. If you want to set the filter in that case, you need to explicitly check that the value is not undefined:
if(typeof req.query.class !== 'undefined') { ... }
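Put together, the route could look like this (a sketch based on the endpoint above):
router.get('/allcourses', checkAuth, (req, res) => {
  const filter = {};
  // only filter by class when the query parameter is present
  if (req.query.class) {
    filter.class = req.query.class;
  }
  Course.find(filter)
    .select({ coursename: 1, class: 1, board: 1, coursedescription: 1 })
    .then((data) => {
      res.status(200).json({ courses: data });
    })
    .catch((err) => {
      console.log(err);
      return res.status(503).json({
        message: 'Cannot retrieve courses at this moment, try again later!'
      });
    });
});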
We can construct a query by filtering out undefined properties; this will work when there are multiple fields in the query as well:
const omitUndefined = (obj = {}) => Object.entries(obj)
.reduce((result, [key, value]) =>
({ ...result, ...(typeof value !== "undefined" && { [key]: value }) }) ,
{})
Course.find(omitUndefined({ class: req.query.class })).select(...)
/*
This will work for multiple fields, if all fields are undefined you will end up with an empty query object
Course.find(omitUndefined({ class: req.query.class, board: req.query.board })).select(...)
*/
I'm using aws-appsync with the apollo-client and when I try to execute a mutation without providing all fields I get a warning like "Missing field x in {...}". Do I really need to provide all (including optional) fields? How can I handle this gracefully?
I wonder if this is the expected behaviour or whether I'm missing something obvious. I don't want to maintain the added complexity of having to pass all optional fields and having those fields stored in the database as null values.
I figured that since they are just warnings I'll just ignore them, but I found that while the updates were executed in the database, the InMemoryCache would not always update. It would sometimes show the update and other times not.
import {compose, graphql} from "react-apollo";
import gql from "graphql-tag";
import React from "react";
export const EditCard = (props) => {
const handleSave = () => {
props.update({
givenName :'someGivenName',
//middleName omitted on purpose
familyName :'someFamilyName',
});
};
return (
<>...more stuff here...</>
);
};
export const card = gql`
fragment card on Identity{
givenName
middleName
familyName
}
`;
export const CardsGraphQL = gql`
query GerCards {
cards: listIdentitys(filter: {type: {eq: "CARD"}}) {
items {
...card
}
}
}
${card}
`;
export const UpdateCardGraphQL = gql`
mutation UpdateCard($input: UpdateIdentityInput!) {
updateObject: updateIdentity(input: $input) {
...card
}
}
${card}
`;
export const selectConfig = () => {
return {
options: {
fetchPolicy: 'cache-and-network',
},
props: (props) => {
return {
cards: props.data.cards ? props.data.cards.items : [],
};
},
};
};
export const updateConfig = (query) => {
return {
options: {
update: (cache, {data: {updateObject}}) => {
// Read query from cache
const data = cache.readQuery({query});
// Add updated object to the cache
data.cards.items = data.cards.items.map(item => item.id === updateObject.id ? updateObject : item);
//Overwrite the cache with the new results
cache.writeQuery({query, data});
},
},
props: (props) => {
return {
update: (input) => {
props.mutate({
variables: {input},
optimisticResponse: () => ({
updateObject: input,
}),
});
},
};
},
};
};
export default compose(
graphql(CardsGraphQL, selectConfig),
graphql(UpdateCardGraphQL, updateConfig(CardsGraphQL)))
(EditCard);
On the GraphQL side this mutation seems to run without problems, and the result in DynamoDB is what I expect:
{
givenName :'someGivenName',
familyName :'someFamilyName'
}
However, the cache is not always updated with the mutation result, and apollo-client shows the warning:
"Missing field middleName in {..."
If I add the middleName field, the warning goes away and the cache updates correctly, but the result in DynamoDB is:
{
givenName :'someGivenName',
middleName : null,
familyName :'someFamilyName'
}
This approach results in additional complexity in my client that I would like to avoid maintaining.
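One direction I have been considering (a sketch I have not verified): pad the optional fields only in the optimisticResponse, so the card fragment in the cache is complete while the mutation input sent to the server still omits them:
props.mutate({
  variables: {input}, // middleName stays omitted here, so it is not persisted as null
  optimisticResponse: () => ({
    updateObject: {
      middleName: null, // satisfies the card fragment in the cache only
      ...input,
    },
  }),
});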
Does anyone else have this problem? How to solve this gracefully?
Any help is appreciated.
I have query results from MongoDB as an array of documents with nested subdocuments and arrays of subdocuments.
[
{
RecordID: 9000,
RecordType: 'Item',
Location: {
_id: 5d0699326e310a6fde926a08,
LocationName: 'Example Location A'
},
Items: [
{
Title: 'Example Title A',
Format: {
_id: 5d0699326e310a6fde926a01,
FormatName: 'Example Format A'
}
},
{
Title: 'Example Title B',
Format: {
_id: 5d0699326e310a6fde926a01,
FormatName: 'Example Format B'
}
}
],
},
{
RecordID: 9001,
RecordType: 'Item',
Location: {
_id: 5d0699326e310a6fde926a08,
LocationName: 'Example Location C'
},
Items: [
{
Title: 'Example Title C',
Format: {
_id: 5d0699326e310a6fde926a01,
FormatName: 'Example Format C'
}
}
],
}
]
Problem
I need to export the results to XLSX in a specified column order. The XLSX library works for exporting the top-level properties (such as RecordID and RecordType) only; I also need to export the nested objects and arrays of objects. Given a list of property names, e.g. RecordID, RecordType, Location.LocationName, Items.Title, Items.Format.FormatName, the properties must be exported to XLSX columns in that order.
Desired result
Here is the desired 'flattened' structure (or something similar) that I think I should be able to convert to XLSX columns.
[
{
'RecordID': 9000,
'RecordType': 'Item',
'Location.LocationName': 'Example Location A',
'Items.Title': 'Example Title A, Example Title B',
'Items.Format.FormatName': 'Example Format A, Example Format B',
},
{
'RecordID': 9001,
'RecordType': 'Item',
'Location.LocationName': 'Example Location C',
'Items.Title': 'Example Title C',
'Items.Format.FormatName': 'Example Format C',
}
]
I am using the XLSX library to convert the query results to XLSX which works for top-level properties only.
const worksheet: XLSX.WorkSheet = XLSX.utils.json_to_sheet(results.data);
const workbook: XLSX.WorkBook = { Sheets: { 'data': worksheet }, SheetNames: ['data'] };
const excelBuffer: any = XLSX.write(workbook, { bookType: 'xlsx', type: 'array' });
const data: Blob = new Blob([excelBuffer], { type: EXCEL_TYPE });
FileSaver.saveAs(data, new Date().getTime());
POSSIBLE OPTIONS
I am guessing I need to 'flatten' the structure, either by using aggregation in the query or by post-processing the results after the query returns.
Option 1: Build the logic in the MongoDB query to flatten the results.
$replaceRoot might work since it is able to "promote an existing embedded document to the top level", although I am not sure whether it solves the problem exactly: I do not want to modify the documents in place, I just need to flatten the results for exporting.
Here is the MongoDB query I am using to produce the results:
records.find({ '$and': [ { RecordID: { '$gt': 9000 } } ]},
{ skip: 0, limit: 10, projection: { RecordID: 1, RecordType: 1, 'Items.Title': 1, 'Items.Location': 1 }});
Option 2: Iterate and flatten the results on the Node server
This is likely not the most performant option, but might be the easiest if I can't find a way to do so within the MongoDB query.
UPDATE:
I may be able to use MongoDB aggregate $project to 'flatten' the results. For example, this aggregate query effectively 'flattens' the results by 'renaming' the properties. I just need to figure out how to implement the query conditions within the aggregate operation.
db.records.aggregate({
$project: {
RecordID: 1,
RecordType: 1,
Title: '$Items.Title',
Format: '$Items.Format'
}
})
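If I do go this route, the query conditions from the original find should presumably be expressible as a $match stage ahead of the $project (a sketch mirroring the find query above; skip/limit become $skip/$limit stages):
db.records.aggregate([
  { $match: { $and: [{ RecordID: { $gt: 9000 } }] } },
  {
    $project: {
      RecordID: 1,
      RecordType: 1,
      Title: '$Items.Title',
      Format: '$Items.Format'
    }
  },
  { $skip: 0 },
  { $limit: 10 }
])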
UPDATE 2:
I have abandoned the $project solution because I would need to change the entire API to support aggregation. I would also need to find a replacement for populate, since aggregate does not support it; the equivalent is $lookup, which is possible but time-consuming because I would need to build the queries dynamically. I am going back to looking into how to flatten the objects with a function that iterates the array of objects recursively.
Below is a solution for transforming the Mongo data on the server via a flattenObject function, which recursively flattens nested objects and produces 'dot-type' keys for nested paths.
Note that the snippet below also contains a function that renders an editable table for preview; the important part (downloading the file) is triggered when you run the snippet and click the 'Download' button.
const flattenObject = (obj, prefix = '') =>
Object.keys(obj).reduce((acc, k) => {
const pre = prefix.length ? prefix + '.' : '';
if (typeof obj[k] === 'object') Object.assign(acc, flattenObject(obj[k], pre + k));
else acc[pre + k] = obj[k];
return acc;
}, {});
var data = [{
RecordID: 9000,
RecordType: "Item",
Location: {
_id: "5d0699326e310a6fde926a08",
LocationName: "Example Location A"
},
Items: [{
Title: "Example Title A",
Format: {
_id: "5d0699326e310a6fde926a01",
FormatName: "Example Format A"
}
},
{
Title: "Example Title B",
Format: {
_id: "5d0699326e310a6fde926a01",
FormatName: "Example Format B"
}
}
]
},
{
RecordID: 9001,
RecordType: "Item",
Location: {
_id: "5d0699326e310a6fde926a08",
LocationName: "Example Location C"
},
Items: [{
Title: "Example Title C",
Format: {
_id: "5d0699326e310a6fde926a01",
FormatName: "Example Format C"
}
}]
}
];
const EXCEL_MIME_TYPE = `application/vnd.ms-excel`;
const flattened = data.map(e => flattenObject(e));
const ws_default_header = XLSX.utils.json_to_sheet(flattened);
const ws_custom_header = XLSX.utils.json_to_sheet(flattened, {
header: ['Items.Title', 'RecordID', 'RecordType', 'Location.LocationName', 'Items.Format.FormatName']
});
const def_workbook = {
Sheets: {
'data': ws_default_header
},
SheetNames: ['data']
}
const custom_workbook = {
Sheets: {
'data': ws_custom_header
},
SheetNames: ['data']
}
const def_excelBuffer = XLSX.write(def_workbook, {
bookType: 'xlsx',
type: 'array'
});
const custom_excelBuffer = XLSX.write(custom_workbook, {
bookType: 'xlsx',
type: 'array'
});
const def_blob = new Blob([def_excelBuffer], {
type: EXCEL_MIME_TYPE
});
const custom_blob = new Blob([custom_excelBuffer], {
type: EXCEL_MIME_TYPE
});
const def_button = document.getElementById('dl-def')
/* trigger browser to download file */
def_button.onclick = e => {
e.preventDefault()
saveAs(def_blob, `${new Date().getTime()}.xlsx`);
}
const custom_button = document.getElementById('dl-cus')
/* trigger browser to download file */
custom_button.onclick = e => {
e.preventDefault()
saveAs(custom_blob, `${new Date().getTime()}.xlsx`);
}
/*
render editable table to preview (for SO convenience)
*/
const html_string_default = XLSX.utils.sheet_to_html(ws_default_header, {
id: "data-table",
editable: true
});
const html_string_custom = XLSX.utils.sheet_to_html(ws_custom_header, {
id: "data-table",
editable: true
});
document.getElementById("container").innerHTML = html_string_default;
document.getElementById("container-2").innerHTML = html_string_custom;
<script src="https://cdnjs.cloudflare.com/ajax/libs/xlsx/0.14.3/xlsx.full.min.js"></script>
<head>
<title>Excel file generation from JSON</title>
<meta charset="utf-8" />
<style>
.xport,
.btn {
display: inline;
text-align: center;
}
a {
text-decoration: none
}
#data-table,
#data-table th,
#data-table td {
border: 1px solid black
}
</style>
</head>
<script>
function render(type, fn, dl) {
var elt = document.getElementById('data-table');
var wb = XLSX.utils.table_to_book(elt, {
sheet: "Sheet JS"
});
return dl ?
XLSX.write(wb, {
bookType: type,
bookSST: true,
type: 'array'
}) :
XLSX.writeFile(wb, fn || ('SheetJSTableExport.' + (type || 'xlsx')));
}
</script>
<div>Default Header</div>
<div id="container"></div>
<br/>
<div>Custom Header</div>
<div id="container-2"></div>
<br/>
<table id="xport"></table>
<button type="button" id="dl-def">Download Default Header Config</button>
<button type="button" id="dl-cus">Download Custom Header Config</button>
<script src="https://cdnjs.cloudflare.com/ajax/libs/FileSaver.js/1.3.8/FileSaver.min.js"></script>
I wrote a function that iterates all objects in the results array and creates new flattened objects recursively. The flattenObject function shown here is similar to the one in the previous answer, and I took additional inspiration from this related answer.
The '_id' properties are specifically excluded from being added to the flattened object, since ObjectIds are still being returned as bson types even though I have the lean() option set.
I still need to figure out how to sort the objects so that they appear in a given order, e.g. RecordID, RecordType, Items.Title. I believe that might be easiest to achieve with a separate function that iterates the flattened results, although that is not necessarily the most performant approach. Let me know if anyone has suggestions on how to order the properties by a given list, or any improvements to this solution.
const apiCtrl = {};
/**
* Async array iterator
*/
apiCtrl.asyncForEach = async (array, callback) => {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array)
}
}
// Check if a value is an object
const isObject = (val) => {
return typeof val == 'object' && val instanceof Object && !(val instanceof Array);
}
// Check if a value is a date object
const isDateObject = (val) => {
return Object.prototype.toString.call(val) === '[object Date]';
}
/**
* Iterate object properties recursively and flatten all values to top level properties
* @param {object} obj Object to flatten
* @param {string} prefix A string to hold the property name
* @param {object} res A temp object to store the current iteration
* Return a new object with all properties on the top level only
*
*/
const flattenObject = (obj, prefix = '', res = {}) =>
Object.entries(obj).reduce((acc, [key, val]) => {
const k = `${prefix}${key}`
// Skip _ids since they are returned as bson values
if (k.indexOf('_id') === -1) {
// Check if value is an object
if (isObject(val) && !isDateObject(val)) {
flattenObject(val, `${k}.`, acc)
// Check if value is an array
} else if (Array.isArray(val)) {
// Iterate each array value and call function recursively
val.map(element => {
flattenObject(element, `${k}.`, acc);
});
// If value is not an object or an array
} else if (val !== null && val !== undefined) {
// Check if property has a value already
if (res[k]) {
// Check for duplicate values
if (typeof res[k] === 'string' && res[k].indexOf(val) === -1) {
// Append value with a separator character at the beginning
res[k] += '; ' + val;
}
} else {
// Set value
res[k] = val;
}
}
}
return acc;
}, res);
/**
* Convert DB query results to an array of flattened objects
* Required to build a format that is exportable to csv, xlsx, etc.
* @param {array} results Results of DB query
* Return a new array of objects with all properties on the top level only
*/
apiCtrl.buildExportColumns = async (results) => {
const data = results.data;
let exportColumns = [];
if (data && data.length > 0) {
try {
// Iterate all records in results data array
await apiCtrl.asyncForEach(data, async (record) => {
// Convert the multi-level object to a flattened object
const flattenedObject = flattenObject(record);
// Push flattened object to array
exportColumns.push(flattenedObject);
});
} catch (e) {
console.error(e);
}
}
return exportColumns;
}
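For the open question about column order, one approach (a sketch, assuming the flattened rows produced by buildExportColumns and a caller-supplied column list) is to rebuild each row with its keys inserted in the desired order, which json_to_sheet will then respect; the header option of json_to_sheet, shown in the previous answer, is another way to get the same result.
// Rebuild each flattened row so its keys follow a given column order.
// columnOrder is caller-supplied, e.g.
// ['RecordID', 'RecordType', 'Location.LocationName', 'Items.Title', 'Items.Format.FormatName']
const orderColumns = (rows, columnOrder) =>
  rows.map((row) => {
    const ordered = {};
    // requested columns first, in the requested order
    columnOrder.forEach((key) => {
      if (key in row) ordered[key] = row[key];
    });
    // then any remaining keys, preserving their original order
    Object.keys(row).forEach((key) => {
      if (!(key in ordered)) ordered[key] = row[key];
    });
    return ordered;
  });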