Ember.js precompiler without node on production environment - node.js

I'm trying to use the Ember precompiler as described here. I just want to precompile during my development process and deploy everything to my production environment, where there is no Node.js server.
When I include the generated JS files for all my compiled templates, I get this error in the console: SyntaxError: missing declaration after 'export' keyword
This is an example of my fu.js file, compiled from fu.hbs:
export default Ember.HTMLBars.template((function() {
  return {
    meta: {
      "revision": "Ember@1.13.3",
      "loc": {
        "source": null,
        "start": {
          "line": 1,
          "column": 0
        },
        "end": {
          "line": 1,
          "column": 10
        }
      }
    },
    arity: 0,
    cachedFragment: null,
    hasRendered: false,
    buildFragment: function buildFragment(dom) {
      var el0 = dom.createDocumentFragment();
      var el1 = dom.createTextNode("Hello Fufu");
      dom.appendChild(el0, el1);
      return el0;
    },
    buildRenderNodes: function buildRenderNodes() { return []; },
    statements: [],
    locals: [],
    templates: []
  };
}()));
Shouldn't there be an identifier for my hbs, like "fu.hbs"? How are the precompiled templates added to my Ember application? Do I just have to reference the JS files inside my index.html, with the same names as the templates?
If everything worked, would I still have to put the following into my index.html?
<script type="text/x-handlebars" data-template-name="fu" id="fu">
Hello Fufu
</script>

I found the solution now, without docs...
After precompiling the templates, I had to register every template on the Ember.TEMPLATES object, so the code now looks like:
Ember.TEMPLATES["fu"] = Ember.HTMLBars.template((function() {
return {
meta: {
"revision": "Ember#1.13.3",
"loc": {
"source": null,
"start": {
"line": 1,
"column": 0
},
"end": {
"line": 1,
"column": 10
}
}
},
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createTextNode("Hello Fufu");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes() { return []; },
statements: [
],
locals: [],
templates: []
};
}()));
The <script type="text/x-handlebars" data-template-name="fu" id="fu"> elements in the index file can then be removed.
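For anyone who wants to automate this wrapping step instead of editing the compiled files by hand, here is a minimal build-time sketch. It assumes the ember-template-compiler module that ships with your Ember release (1.13.x here) is available next to the script and exposes a precompile() function; the template directory and output file name are placeholders:
#!/usr/bin/env node
// Build-time sketch (assumptions: ember-template-compiler sits next to this
// script; "app/templates" and "templates.js" are placeholder paths).
var fs = require("fs");
var path = require("path");
var compiler = require("./ember-template-compiler");

var templateDir = "app/templates";
var output = fs.readdirSync(templateDir)
  .filter(function (file) { return path.extname(file) === ".hbs"; })
  .map(function (file) {
    var name = path.basename(file, ".hbs"); // e.g. "fu" for fu.hbs
    var source = fs.readFileSync(path.join(templateDir, file), "utf8");
    // precompile() returns the template spec as a string of JavaScript;
    // wrap it so it registers itself under Ember.TEMPLATES when loaded.
    return "Ember.TEMPLATES[" + JSON.stringify(name) + "] = " +
      "Ember.HTMLBars.template(" + compiler.precompile(source) + ");";
  })
  .join("\n\n");

fs.writeFileSync("templates.js", output);
The single generated templates.js (the name is arbitrary) is then the only template file that needs to be referenced from index.html.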

Related

Unable to retrieve ordered job list from Google Transcoder API

I'm using the Node.js client library of the Google Transcoder API. I'm able to retrieve a paginated list of jobs, but I'm not able to order the elements by start date. Here is my code:
const { TranscoderServiceClient } = require('@google-cloud/video-transcoder').v1;

class TranscoderApiController {
  constructor() {
    this.projectId = process.env.GOOGLE_CLOUD_PROJECT;
    this.location = process.env.TASK_LOCATION;
  }

  async getEntries(req, res, next) {
    const params = {
      pageSize: req.query.pageSize ? parseInt(req.query.pageSize) : 10,
      pageToken: req.query.pageToken,
      filter: req.query.filter,
      orderBy: req.query.orderBy
    };
    const client = new TranscoderServiceClient();
    const result = await client.listJobs({
      parent: client.locationPath(this.projectId, this.location),
      pageSize: params.pageSize,
      orderBy: 'createTime.seconds'
    }, {
      autoPaginate: false
    });
    if (result.length == 3 && result[2] != undefined) {
      return result[2];
    } else {
      return result[1];
    }
  }
}

module.exports = new TranscoderApiController();
When I call the getEntries method, I receive the following error:
"3 INVALID_ARGUMENT: The request was invalid: sort order \"createTime.seconds\" is unsupported"
If I remove the orderBy: 'createTime.seconds' line, the API works, but the result is not ordered as I want. The result is something like this (I've abbreviated the JSON):
{
  "jobs": [
    {
      "labels": {},
      "name": "projects/<id>/locations/europe-west1/jobs/<uuid>",
      "inputUri": "",
      "outputUri": "",
      "state": "SUCCEEDED",
      "createTime": {
        "seconds": "1656602896",
        "nanos": 386772728
      },
      "startTime": {
        "seconds": "1656602900",
        "nanos": 755000000
      },
      "endTime": {
        "seconds": "1656603062",
        "nanos": 428000000
      },
      "ttlAfterCompletionDays": 30,
      "error": null,
      "config": {
        "inputs": [
          {
            "key": "input0",
            "uri": "gs://<url>/render_md.mp4",
            "preprocessingConfig": null
          }
        ],
        "editList": [...],
        "elementaryStreams": [...],
        "muxStreams": [...],
        "manifests": [],
        "adBreaks": [],
        "spriteSheets": [],
        "overlays": [],
        "output": {
          "uri": "gs://<url>/md.mp4/"
        },
        "pubsubDestination": {
          "topic": "projects/<id>/topics/transcoder_api"
        }
      },
      "jobConfig": "config"
    },
    ...
  ],
  "unreachable": [],
  "nextPageToken": "Co8BCjgKDGV1cm9wZS13ZXN0MRIZdHJhbnNjb2Rlci5nb29nbGVhcGlzLmNvbRgBII..."
}
As you can see, each job has the startTime.seconds property. I followed the syntax described here:
https://google.aip.dev/132#ordering
Any help solving the ordering issue is appreciated.
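One thing worth trying, sketched below as a drop-in change to the listJobs call inside getEntries: order on the top-level field instead of its .seconds sub-field, following the AIP-132 syntax linked above. Which fields the Transcoder API actually accepts for sorting is an assumption here, not something confirmed by its docs:
// Hedged variation of the call above: AIP-132 ordering takes a field name,
// optionally followed by " desc", rather than a sub-field path like ".seconds".
const result = await client.listJobs({
  parent: client.locationPath(this.projectId, this.location),
  pageSize: params.pageSize,
  orderBy: 'startTime desc' // or 'createTime desc'
}, {
  autoPaginate: false
});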

Why do I keep getting a parsing error with the ESLint CLIEngine API?

I am trying to use the CLIEngine in the ESLint Node.js API. The use case is that I am passing stringified HTML to a REST endpoint to lint it with an a11y lint plugin. This will provide real-time accessibility feedback to users practicing HTML in a browser code editor.
Right now, just to get things working, I am hard-coding an HTML string into the cli.executeOnText() method.
Here's the REST endpoint:
router.post('/lint', (req, res) => {
  console.log("req: ", req.body);
  const cli = new CLIEngine({
    envs: ["node"],
    useEslintrc: false,
    rules: {
      semi: 2
    }
  });
  cli.addPlugin("eslint-plugin-lit-a11y", {
    processors: {
      ".html": {
        preprocess: function(text) {
          return [text];
        },
        postprocess: function(messages) {
          return messages[0];
        }
      }
    }
  });
  const report = cli.executeOnText("<img src='https://google.com' />", "foo.html");
  console.log("RESULTS: ", report.results);
  res.json({body: report.results});
});
And these are the report results that get returned:
{
  "body": [
    {
      "filePath": "/Users/Shared/work/accessibility-code-school/foo.html",
      "messages": [
        {
          "ruleId": null,
          "fatal": true,
          "severity": 2,
          "message": "Parsing error: Unexpected token <",
          "line": 1,
          "column": 1
        }
      ],
      "errorCount": 1,
      "fatalErrorCount": 1,
      "warningCount": 0,
      "fixableErrorCount": 0,
      "fixableWarningCount": 0,
      "source": "<img src='https://google.com' />"
    }
  ]
}
I have spent a good deal of time reading through the ESLint API docs but am clearly missing something, though I am pretty sure I am close. Any ideas what piece I am missing to fix the error Parsing error: Unexpected token <?
I am using "eslint": "^7.32.0" and "eslint-plugin-lit-a11y": "^1.1.0".

Errors making a POST request in Thunder Client

When I make a POST request in Thunder Client I can't get my data back in the response; instead I get a 500 Internal Server Error. This is the error I get when making the request in Thunder Client or Postman:
{
  "code": 79,
  "codeName": "UnknownReplWriteConcern",
  "errInfo": {
    "writeConcern": {
      "w": "majority;",
      "wtimeout": 0,
      "provenance": "clientSupplied"
    }
  },
  "result": {
    "n": 1,
    "opTime": {
      "ts": {
        "$timestamp": "7022899934215012355"
      },
      "t": 99
    },
    "electionId": "7fffffff0000000000000063",
    "ok": 1,
    "writeConcernError": {
      "code": 79,
      "codeName": "UnknownReplWriteConcern",
      "errmsg": "No write concern mode named 'majority;' found in replica set configuration",
      "errInfo": {
        "writeConcern": {
          "w": "majority;",
          "wtimeout": 0,
          "provenance": "clientSupplied"
        }
      }
    },
    "$clusterTime": {
      "clusterTime": {
        "$timestamp": "7022899934215012355"
      },
      "signature": {
        "hash": "/gnrM/bYkyRYi4XXXmEnkaLJJpg=",
        "keyId": {
          "low": 1,
          "high": 1620408145,
          "unsigned": false
        }
      }
    },
    "operationTime": {
      "$timestamp": "7022899934215012355"
    }
  }
}
I faced the same problem. I put this code, which contains writeConcern, into my schema, and it works:
const schema = new Schema({
  name: String
}, {
  writeConcern: {
    w: 'majority',
    j: true,
    wtimeout: 1000
  }
});
Another thing I did was remove the single quotes that were around my DB URL.
Check out this: https://pretagteam.com/question/mongowriteconcernerror-no-write-concern-mode-named-majority-found-in-replica-set-configuration
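Note that in the error body above the write concern reaching the server is literally "majority;" with a trailing semicolon, which usually points at the connection string rather than the schema. A hedged sketch of what the corrected connection can look like, assuming Mongoose is used (the Schema in the answer suggests it) and with a placeholder URI:
const mongoose = require("mongoose");

// Placeholder URI: no quotes pasted into the string, and no stray semicolon
// after w=majority, so the server can resolve the write concern mode.
mongoose.connect(
  "mongodb+srv://user:pass@cluster0.example.mongodb.net/mydb?retryWrites=true&w=majority"
);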

JSON Transform Only Returning 1st Item in JSON Object

I am using object-mapper to transform a JSON object into a different JSON format. The following works... but it only produces the first entry from the JSON I have. Is there some sort of foreach or iteration that needs to be added?
This:
var src = response.data;
var map = {
  "value.segments.start": "value.start",
  "value.segments.end": "value.end",
  "value.segments.segments.performanceCounters/processCpuPercentage.avg": "value.cpu"
};
var dest = objectMapper(src, map);
res.send(dest);
Produces this:
{
  "value": {
    "start": "2021-04-15T00:00:00.000Z",
    "end": "2021-04-16T00:00:00.000Z",
    "cpu": 9.01
  }
}
But here is my response.data. I get JSON from one source and need to transform it into another format; as you can see, it has multiple entries, and I expected multiple entries in the object-mapped output:
{
  "value": {
    "start": "2021-04-14T18:17:27.086Z",
    "end": "2021-04-15T18:17:27.086Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2021-04-14T18:17:27.086Z",
        "end": "2021-04-14T19:00:00.000Z",
        "segments": [
          {
            "performanceCounters/processCpuPercentage": {
              "avg": 22.9
            },
            "customDimensions/Role": "CD"
          }
        ]
      },
      {
        "start": "2021-04-14T19:00:00.000Z",
        "end": "2021-04-14T20:00:00.000Z",
        "segments": [
          {
            "performanceCounters/processCpuPercentage": {
              "avg": 23.51
            },
            "customDimensions/Role": "CD"
          }
        ]
      },
      {
        "start": "2021-04-15T00:00:00.000Z",
        "end": "2021-04-15T01:00:00.000Z",
        "segments": [
          {
            "performanceCounters/processCpuPercentage": {
              "avg": 8.85
            },
            "customDimensions/Role": "CD"
          }
        ]
      },
It looks like objectMapper is just for Objects, not arrays of objects. You should have luck with the following:
var src = response.data;
var map = {
  "value.segments.start": "value.start",
  "value.segments.end": "value.end",
  "value.segments.segments.performanceCounters/processCpuPercentage.avg": "value.cpu"
};
var dest = src.map(s => objectMapper(s, map));
res.send(dest);
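If you would rather not depend on how object-mapper walks arrays, here is a minimal sketch of the same per-segment iteration in plain JavaScript, assuming the goal is one { start, end, cpu } entry per hourly segment of the response shown above:
var src = response.data;

// Map each hourly segment to one output entry; the inner "segments" array in
// the source holds the per-role counters, so take its first element here.
var dest = {
  value: src.value.segments.map(function (seg) {
    var counters = seg.segments && seg.segments[0];
    return {
      start: seg.start,
      end: seg.end,
      cpu: counters
        ? counters["performanceCounters/processCpuPercentage"].avg
        : null
    };
  })
};

res.send(dest);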

Node/MongoDb JSON empty array bug

I recently upgraded to Node 5.4 and the MongoDB 2.1.4 driver for Node, and I have the following problem:
I have a file called buildings.json containing something like this:
{
  "military_base": {
    "type": "military",
    "level": 0,
    "maxLevel": 25,
    "upgrade": true,
    "targetEnvironment": [],
    "timeUntilBuilt": 0,
    "costCoef": 0.4,
    "requiredResearches": [
      "foo#4",
      "bar#1"
    ],
    "requiredResources": [
      "cash",
      "metal",
      "palladium"
    ],
    "inProgress": 0,
    "queue": []
  },
  "aqua_center": {
    "type": "industrial",
    "level": 0,
    "maxLevel": 25,
    "upgrade": true,
    "targetEnvironment": [
      "ocean",
      "snowy"
    ],
    "timeUntilBuilt": 0,
    "costCoef": 0.7,
    "requiredResearches": [
      "lorem#10",
      "ipsum#3"
    ],
    "requiredResources": [
      "cash",
      "cristal"
    ],
    "inProgress": 0,
    "queue": []
  }
}
Now I want to insert this document into a MongoDB database:
let buildings = require( "./buildings.json" );
...
collection.insertOne( { foo: buildings }, callback );
If the first "targetEnvironment" array is empty, nothing is inserted, the callback is not called and there is no error. But if it's filled, all is fine, the callback is called and the document is correctly inserted...
I must admit that I don't know if the problem is related to Node or the MongoDb Driver...
Can someone help me, please ?
#!/usr/bin/env node

const mongodb = require( "mongodb" );
const MongoClient = mongodb.MongoClient;
const ObjectID = mongodb.ObjectID;

MongoClient.connect( "mongodb://127.0.0.1:27017/test", function( err, db ) {
  if ( err ) {
    throw err;
  }
  var collection = db.collection( "test-insert" );
  collection.insertOne( test, ( err, obj ) => {
    if ( err ) {
      throw err;
    }
    console.log( obj.insertedId );
    db.close();
  } );
} );
var test = {
  "military_base": {
    "type": "military",
    "level": 0,
    "maxLevel": 25,
    "upgrade": true,
    "targetEnvironment": [],
    "timeUntilBuilt": 0,
    "costCoef": 0.4,
    "requiredResearches": ["foo#4", "bar#1"],
    "requiredResources": ["cash", "metal", "palladium"],
    "inProgress": 0,
    "queue": []
  },
  "aqua_center": {
    "type": "industrial",
    "level": 0,
    "maxLevel": 25,
    "upgrade": true,
    "targetEnvironment": ["ocean", "snowy"],
    "timeUntilBuilt": 0,
    "costCoef": 7,
    "requiredResearches": ["lorem#10", "ipsum#3"],
    "requiredResources": ["cash", "cristal"],
    "inProgress": 0,
    "queue": []
  }
};
If I try to insert this document, it is not inserted because the first costCoef property is a floating-point number. It also fails if it's a large number, like a timestamp.
Oh... As I have 64-bit Windows, I installed the 64-bit Node.js version, and that's where the problem came from... with 32-bit, there is no error! :o
