Why is the apostrophe-pages _children array always empty? - node.js

I am trying to get the children of apostrophe pages to appear in my navigation object - however the _children array is always empty. My page does have child pages set up via the front end Pages UI.
My index.js for the lib/modules/apostrophe-pages module contains the following:
construct: function(self, options) {
  // store the superclass method and call at the end
  var superPageBeforeSend = self.pageBeforeSend;
  self.pageBeforeSend = function(req, callback) {
    // Query all pages with top_menu setting = true and add to menu collection
    self.apos.pages.find(req, { top_menu: true }, { slug: 1, type: 1, _id: 1, title: 1 })
      .children(true)
      .toArray(function (err, docs) {
        if (err) {
          return callback(err);
        }
        req.data.navpages = docs;
        return superPageBeforeSend(req, callback);
      });
  };
},
...
My top_menu attribute is set via apostrophe-custom-pages:
module.exports = {
  beforeConstruct: function(self, options) {
    options.addFields = [
      {
        name: 'subtitle',
        label: 'Subtitle',
        type: 'string'
      },
      {
        name: 'css_class',
        label: 'CSS Class',
        type: 'string'
      },
      {
        name: 'top_menu',
        label: 'Include In Top Menu',
        type: 'boolean'
      }
    ].concat(options.addFields || []);
  }
};
This gives me the pages I need with the top_menu setting, but I want to get the child pages too.
When debugging the code, I can see that the _children array is present on each returned doc but is always empty, even though the page has child pages.
I have tried adding the following to both my app.js and my index.js, but it doesn't change the result:
filters: {
  // Grab our ancestor pages, with two levels of subpages
  ancestors: {
    children: {
      depth: 2
    }
  },
  // We usually want children of the current page, too
  children: true
}
How can I get my find() query to actually include the child pages?

Solved it.
I needed to add rank: 1, path: 1, level: 1 to the projection, as per this page in the documentation: https://apostrophecms.org/docs/tutorials/howtos/children-and-joins.html#projections-and-children
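For reference, the working query ends up looking something like this (the same pageBeforeSend override as above, with the extra projection fields that .children() needs):
self.apos.pages.find(req, { top_menu: true },
  { slug: 1, type: 1, _id: 1, title: 1, rank: 1, path: 1, level: 1 })
  .children(true)
  .toArray(function (err, docs) {
    if (err) {
      return callback(err);
    }
    req.data.navpages = docs;
    return superPageBeforeSend(req, callback);
  });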

Related

Filter and show only SVG images in ApostropheCMS

I created a new filter named svg to show only SVG images or only non-SVG images.
But I don't understand where I can set the filter value to true or false.
The code is shown below.
Widget index file:
module.exports = {
  label: 'Section SVG Images',
  addFields: [
    {
      name: 'svg-images',
      label: 'SVG Images',
      type: 'singleton',
      widgetType: 'apostrophe-images',
      filters: {
        svg: true
      },
      required: true
    }
  ]
};
Custom cursor filter:
module.exports = {
  construct: function(self, options) {
    self.addFilter('svg', {
      finalize: function() {
        var svg = self.get('svg'); // <--- HERE svg is always 'undefined'
        if (typeof svg === 'undefined') {
          return;
        }
        var criteria;
        if (svg) {
          criteria = {
            'attachment.extension': 'svg'
          };
        } else {
          criteria = {
            'attachment.extension': { $ne: 'svg' }
          };
        }
        self.and(criteria);
      },
      safeFor: 'public',
      launder: function(a) {
        return self.apos.launder.boolean(a);
      }
    });
  }
};
filters is not a top-level option for singleton fields in Apostrophe; that is unique to join-type fields. You could have this widget extend apostrophe-pieces-widgets, then you could probably set that same filters object directly on the new pieces widget. That should register the cursor filter, at least.
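A minimal sketch of that suggestion, assuming an Apostrophe 2.x project. The module name svg-images-widgets and the piecesModuleName option are illustrative, and whether the filters object is honored on the underlying join is exactly the part you would need to verify:
// lib/modules/svg-images-widgets/index.js (hypothetical module name)
module.exports = {
  // extend the pieces widget base type instead of using a singleton field
  extend: 'apostrophe-pieces-widgets',
  label: 'SVG Images',
  // point the widget at the images pieces module (assumed option name)
  piecesModuleName: 'apostrophe-images',
  // the same filters object as before, applied to the widget's pieces cursor
  filters: {
    svg: true
  }
};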
It's worth noting that there is currently a PR in progress to add filtering by both file type and orientation to Apostrophe core!

Using updateOne method to update an object field inside array - throws error "Cannot create field 'url' in element"

I have a MongoDB database (with Mongoose) containing a collection of Products (among others), which looks like this:
[
  {
    name: 'Product A',
    url: 'product-a',
    category: 'accesory',
    price: 12,
    shortDescription: ['example description'],
    technicalSpecs: [{ speed: 10, weight: 20 }],
    images: [],
    reviews: [],
    relatedProducts: [
      {
        url: 'product-b',
        name: 'Product B',
        // to be added in Update query
        //id: id_of_related_product
      }
    ]
  } /* other Product objects */
]
As every MongoDB document is provided with an _id property by default, but within the relatedProducts array I only have url and name properties, I want to add an id property (pointing at the corresponding Product) to each object in the relatedProducts array, so I will be able to conveniently query and process those related products.
I came up with an idea to query all Products and get only those which have a non-empty relatedProducts array. Then I loop over them, and for each related product I search for the Product which has the matching url and name properties - this lets me get its true (added by MongoDB) _id. At the end I want to add this _id to the matching object inside the relatedProducts array.
My code:
async function assignIDsToRelatedProducts(/* Model constructor */ Product) {
  const productsWithRelatedOnes = await Product.find(
    { relatedProducts: { $ne: [] } }, ['relatedProducts', 'name', 'url']
  );
  for (const productItem of productsWithRelatedOnes) {
    for (const relatedProduct of productItem.relatedProducts) {
      const product = await Product.findOne(
        { url: relatedProduct.url, name: relatedProduct.name },
        '_id'
      );
      // throws error
      await productItem.updateOne(
        { 'relatedProducts.url': relatedProduct.url },
        { $set: { 'relatedProducts.$.id': product._id } }
      );
    }
  }
}
However, it throws the following error:
MongoError: Cannot create field 'url' in element {relatedProducts: [ /* array's objects here */ ]}
I don't know why MongoDB tries to create the field 'url', as I use it to query the url field (not create it) in the updateOne method. How can I fix this?
And - as I am a newbie to MongoDB - is there a simpler way of achieving my goal? I feel that those two nested for..of loops are unnecessary, or even that the preceding creation of the productsWithRelatedOnes variable is.
Is it possible to do this with Mongoose virtuals? I have tried, but I couldn't match the virtual property within the same Product model and attach it to each object in the relatedProducts array - after calling .execPopulate I received either an empty array or undefined (I am aware I should post the exact code of that attempt, but for now I have switched to the solution above).
Although I didn't find the cause of my problem, I solved it with a slightly different approach:
async function assignIDsToRelatedProducts(Product) {
  const productsHavingRelatedProducts = Product.find({ relatedProducts: { $ne: [] } });
  for await (const withRelated of productsHavingRelatedProducts) {
    for (const relatedProductToUpdate of withRelated.relatedProducts) {
      const relatedProduct = await Product.findOne(
        { url: relatedProductToUpdate.url, name: relatedProductToUpdate.name },
        ['url', '_id']
      );
      await Product.updateMany(
        { 'relatedProducts.url': relatedProduct.url },
        { $set: { 'relatedProducts.$.id': relatedProduct._id } }
      );
    }
  }
  const amountOfAllProducts = await Product.find({}).countDocuments();
  const amountOfRelatedProductsWithID = await Product
    .find({ 'relatedProducts.id': { $exists: true } }).countDocuments();
  console.log('All done?', amountOfAllProducts === amountOfRelatedProductsWithID);
}
Yet I still suppose it can be done more concisely, without the initial looping. Hopefully somebody will suggest a better solution. :)
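One possible consolidation (an untested sketch, reusing the model and field names from the code above): build a url-to-_id map with a single query, then send all of the per-element updates in one Model.bulkWrite() batch instead of one round trip per related product.
async function assignIDsToRelatedProducts(Product) {
  // One query to map every product url to its _id.
  const allProducts = await Product.find({}, 'url _id');
  const idsByUrl = new Map(allProducts.map((p) => [p.url, p._id]));

  // One query for the products that actually have related products.
  const products = await Product.find({ relatedProducts: { $ne: [] } }, 'relatedProducts');

  // Build one updateOne operation per related-product entry, addressed by array index.
  const operations = [];
  for (const product of products) {
    product.relatedProducts.forEach((related, index) => {
      operations.push({
        updateOne: {
          filter: { _id: product._id },
          update: { $set: { ['relatedProducts.' + index + '.id']: idsByUrl.get(related.url) } }
        }
      });
    });
  }

  // Send everything in a single batch.
  if (operations.length > 0) {
    await Product.bulkWrite(operations);
  }
}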

Load defaultaddress of customer in netsuite via record search

I am working on creating a RESTlet to load paginated customer records. One of the things I need is the customer's defaultaddress. This field is available when I load a single customer like this:
customer = record.load({
  type: record.Type.CUSTOMER,
  id: context.id, // e.g. 1234
  isDynamic: false
});
However, when I try to load all customers with pagination like this:
define(['N/record', 'N/search'], function(record, search) {
  var currencies = {};

  function getClients(context) {
    var mySearch = search.create({
      type: search.Type.CUSTOMER,
      columns: [
        { name: 'companyname' },
        { name: 'vatregnumber' },
        { name: 'lastmodifieddate' },
        { name: 'currency' },
        { name: 'email' },
        { name: 'phone' },
        { name: 'defaultaddress' }
      ]
    });
    var searchResult = mySearch.run().getRange({
      start: context.start || 0,
      end: context.end || 100
    });
    var results = [];
    for (var i = 0; i < searchResult.length; i++) {
      results.push({
        tax_number: searchResult[i].getValue({ name: 'vatregnumber' }),
        name: searchResult[i].getValue({ name: 'companyname' }),
        // snipped...
      });
    }
    return results;
  }

  function loadCurrencyName(id) {
    return record.load({
      type: record.Type.CURRENCY,
      id: id,
      isDynamic: false
    }).name;
  }

  return {
    get: getClients
  };
});
and execute the REST API call to the above RESTlet script, I get the following error:
{
  "type": "error.SuiteScriptError",
  "name": "SSS_INVALID_SRCH_COL",
  "message": "An nlobjSearchColumn contains an invalid column, or is not in proper syntax: defaultaddress.",
  "stack": [ ... snipped ... ],
  "cause": { ... snipped ... },
  "id": "",
  "notifyOff": false
}
Any idea how best to load the default address of the customer, whilst loading all customers using the paged search feature?
{defaultaddress} is calculated when the customer record is loaded - it's not a stored field and is not available for saved searches. As the field help says, it's simply the default billing address, and it changes automatically according to which address on the Address subtab has 'Default Billing' checked.
To work around this and display what you're looking for, you can simply replace defaultaddress with billaddress.
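Applied to the search above, that just means swapping the one column (same search.create call as in the question; billaddress is the stored billing-address column the answer refers to):
var mySearch = search.create({
  type: search.Type.CUSTOMER,
  columns: [
    { name: 'companyname' },
    { name: 'vatregnumber' },
    { name: 'lastmodifieddate' },
    { name: 'currency' },
    { name: 'email' },
    { name: 'phone' },
    { name: 'billaddress' } // instead of 'defaultaddress'
  ]
});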

Q promises and mongo db q.all

I'm making calls to a MongoDB database - pulling data out, reading it, and then making further requests based on that data. Once all the data has been received, I wish to process it.
I've been using the Q promises library but don't really know what I'm doing. I thought that q.all would only trigger once everything had completed? However, my processPlaylist function runs twice. I've commented the code below:
Thanks,
Rob
var q = require('q');
var _ = require('lodash'); // or underscore - a version that still provides _.pluck
var PlaylistCollection = require('./models/playlist');
var AssetCollection = require('./models/asset');

var screenID = '############';
var playerData = [];
// array containing playlists which have already been synced
var alreadySynced = [];

// Process our playlist once downloaded
var processPlaylist = function (playerData) {
  console.log('----Processing Playerlist-----');
  console.log(playerData);
  // DO STUFF
};

// Get playlist by id. Return playlist data
var getSubLists = function (id) {
  return PlaylistCollection.findById(id);
};

// Get sub-playlist function
function getSubListRecursive(id) {
  return getSubLists(id).then(function (playlist) {
    // store all our returned playlist data into a playlist array
    playerData.push(playlist);
    // an array to keep tabs on what we've already pulled down
    alreadySynced.push(playlist.id);
    // get all our playlist.resources, and only return those which are unique
    var playlistResources = _.uniq(playlist.resources, 'rid');
    // console.log('Playlist Resources: ', playlistResources)
    // console.log(alreadySynced);
    var sublists = _.pluck(_.filter(playlistResources, { 'type': 'playlist' }), 'rid');
    // remove playlists which have already been synced. We don't want to pull them down twice
    sublists = _.difference(sublists, alreadySynced);
    // console.log('sublists: ', sublists)
    // Get the next playlist and so on...
    var dbops = sublists.map(function (sublist) {
      // console.log(sublist)
      return getSubListRecursive(sublist);
    });
    q.all(dbops).then(function () {
      console.log('All Done - so process the playlist');
      return processPlaylist(playerData);
    });
  });
}

// Trigger the whole process..... grab our first playlist / ScreenID
getSubListRecursive(screenID);
and I get the following output:
----Processing Playerlist-----
[ { _id: 554d1df16ce4c438f8e2225b,
title: 'list 1',
__v: 29,
daily: true,
endTime: '',
startTime: '',
resources:
[ { rid: '55650cebef204ab70302a4d9',
title: 'list 4',
type: 'playlist' },
{ rid: '554d1df16ce4c438f8e2225b',
title: 'list 1',
type: 'playlist' } ] },
{ _id: 55650cebef204ab70302a4d9,
title: 'list 4',
__v: 1,
daily: false,
endTime: '',
startTime: '',
resources:
[ { rid: '55650647ef204ab70302a4d8',
title: 'list 3',
type: 'playlist' } ] } ]
All Done - so process the playlist
----Processing Playerlist-----
[ { _id: 554d1df16ce4c438f8e2225b,
title: 'list 1',
__v: 29,
daily: true,
endTime: '',
startTime: '',
resources:
[ { rid: '55650cebef204ab70302a4d9',
title: 'list 4',
type: 'playlist' },
{ rid: '554d1df16ce4c438f8e2225b',
title: 'list 1',
type: 'playlist' } ] },
{ _id: 55650cebef204ab70302a4d9,
title: 'list 4',
__v: 1,
daily: false,
endTime: '',
startTime: '',
resources:
[ { rid: '55650647ef204ab70302a4d8',
title: 'list 3',
type: 'playlist' } ] },
{ _id: 55650647ef204ab70302a4d8,
title: 'list 3',
__v: 5,
daily: false,
endTime: '',
startTime: '',
resources:
[ { rid: '55650637ef204ab70302a4d7',
title: 'list 2',
type: 'playlist' },
{ rid: '554d1df16ce4c438f8e2225b',
title: 'list 1',
type: 'playlist' },
{ rid: '55650cebef204ab70302a4d9',
title: 'list 4',
type: 'playlist' } ] } ]
EDIT
There were a number of things wrong with what I wrote. I discussed it with a buddy of mine, who pointed out that getSubListRecursive is invoked recursively several times, so the q.all statement is executed several times...
So I refactored:
// Get sub-playlist function
function getSubListRecursive(id) {
  console.log(id);
  return getSubLists(id).then(function (playlist) {
    if (playlist) {
      // store all our returned playlist data into a playlist array
      playerData.push(playlist);
      // an array to keep tabs on what we've already pulled down
      alreadySynced.push(playlist.id);
      // get all our playlist.resources, and only return those which are unique
      var playlistResources = _.uniq(playlist.resources, 'rid');
      // console.log('Playlist Resources: ', playlistResources)
      // console.log(alreadySynced);
      var sublists = _.pluck(_.filter(playlistResources, { 'type': 'playlist' }), 'rid');
      // remove playlists which have already been synced. We don't want to pull them down twice
      sublists = _.difference(sublists, alreadySynced);
      // console.log('sublists: ', sublists)
      return sublists.map(function (sublist) {
        // console.log(sublist)
        if (sublists.length > 0) {
          return getSubListRecursive(sublist);
        } else {
          return processPlaylist(playerData);
        }
      });
    } else {
      return processPlaylist(playerData);
    }
  });
}
This works. I'm basically using promises to control the flow here, which probably isn't the best way of doing it. I no longer use an all statement, and I ultimately end up with an array populated with all the playlist data, which I can manipulate in my processPlaylist function.
However, I've not marked the question as solved, as I'd really like to know how I can do this with Q.all (and properly use promises).
Thanks,
Rob
I think you were just confused about when the entire process was finished. You need to wait until the entire recursive promise chain has resolved. I think you could use the original code with a slight change to where processPlaylist() is called:
var q = require('q');
var _ = require('lodash'); // or underscore, as above
var PlaylistCollection = require('./models/playlist');
var AssetCollection = require('./models/asset');

var screenID = '############';
var playerData = [];
// array containing playlists which have already been synced
var alreadySynced = [];

// Process our playlist once downloaded
var processPlaylist = function (playerData) {
  console.log('----Processing Playerlist-----');
  console.log(playerData);
  // DO STUFF
};

// Get playlist by id. Return playlist data
var getSubLists = function (id) {
  return PlaylistCollection.findById(id);
};

// Get sub-playlist function
function getSubListRecursive(id) {
  return getSubLists(id).then(function (playlist) {
    // store all our returned playlist data into a playlist array
    playerData.push(playlist);
    // an array to keep tabs on what we've already pulled down
    alreadySynced.push(playlist.id);
    // get all our playlist.resources, and only return those which are unique
    var playlistResources = _.uniq(playlist.resources, 'rid');
    // console.log('Playlist Resources: ', playlistResources)
    // console.log(alreadySynced);
    var sublists = _.pluck(_.filter(playlistResources, { 'type': 'playlist' }), 'rid');
    // remove playlists which have already been synced. We don't want to pull them down twice
    sublists = _.difference(sublists, alreadySynced);
    // console.log('sublists: ', sublists)
    // Get the next playlist and so on...
    var dbops = sublists.map(function (sublist) {
      // console.log(sublist)
      return getSubListRecursive(sublist);
    });
    // return the combined promise so callers can wait on the whole subtree
    return q.all(dbops);
  });
}

// Trigger the whole process..... grab our first playlist / ScreenID
getSubListRecursive(screenID).then(function () {
  console.log('All Done - so process the playlist');
  return processPlaylist(playerData);
});

Sails.js - Querying array size

// Item.js
schema: true,
attributes: {
  testArray: {
    type: 'array',
    required: true,
    array: true
  }
}
I would like to find all items where the testArray attribute have a specific length.
I tried with this code below, but it doesn't work.
Item.find({ testArray: { length: 2 } }).exec(function (err, items) {
  console.log(items);
});
I also tried with minLength, maxLength, size, but still no results.
Is there a way to do that?
I'm using MongoDB via Sails.js/Waterline.
Actually, this is in the documentation:
Model.where({ property: { '>': 100 }})
http://sailsjs.org/#!documentation/models
Another solution:
var query = { testArray: { $size: { $gt: 2 } } };
Item.native(function (err, collection) {
  collection.find(query).toArray(function (err, items) {
    // items contains objects where testArray.length is greater than 2
  });
});
You might run into problems depending on your MongoDB version; if so, read http://www.mkyong.com/mongodb/mongodb-find-all-documents-where-an-array-list-size-is-greater-than-n/
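A note on the query above: MongoDB's $size operator only accepts an exact number, not a comparison like $gt, so the workaround from the linked article is usually used instead - match on the existence of an element at a given index. A sketch of both forms, reusing the Item model from the question:
// exact length: arrays with exactly 2 elements
var exactlyTwo = { testArray: { $size: 2 } };

// "greater than 2": arrays that have an element at index 2 (i.e. at least 3 elements)
var moreThanTwo = { 'testArray.2': { $exists: true } };

Item.native(function (err, collection) {
  collection.find(moreThanTwo).toArray(function (err, items) {
    // items contains objects where testArray.length is greater than 2
  });
});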
