Can't configure timed-out user confirmation URL page in LoopbackJS - node.js

I am trying to configure the page shown when a newly registered user's confirmation URL has timed out.
When the user clicks the timed-out link from the e-mail, the screen looks like this:
{"error":{"name":"Error","status":404,"message":"User not found: 19","statusCode":404,"code":"USER_NOT_FOUND","stack":"Error: User not found: 19\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback\\common\\models\\user.js:477:19\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\dao.js:1524:62\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\dao.js:1456:9\n at Object.async.each (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\node_modules\\async\\lib\\async.js:153:20)\n at allCb (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\dao.js:1394:13)\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-connector-mysql\\node_modules\\loopback-connector\\lib\\sql.js:1071:7\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:166:22\n at doNotify (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:93:49)\n at MySQL.ObserverMixin._notifyBaseObservers (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:116:5)\n at MySQL.ObserverMixin.notifyObserversOf (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:91:8)"}}
I want to send a more user-friendly page that includes a message like "Confirmation mail timed out ...".
I tried to use the "afterRemote" method, but it does not work; I never get the "in confirm afterRemote" message.
MyUser.afterRemote('confirm', function(ctx, inst, next) {
  console.log('in confirm afterRemote...');
  next();
});
Is there any way to do this? What is wrong with this afterRemote method?

Edit your server/config.json and set "disableStackTrace": true and "disableStatusCode": true under the errorHandler section; see the example below.
Don't forget to vote ;-)
Cheers
{
  "restApiRoot": "/api",
  "host": "0.0.0.0",
  "port": 4000,
  "remoting": {
    "context": {
      "enableHttpContext": false
    },
    "rest": {
      "normalizeHttpPath": false,
      "xml": false
    },
    "json": {
      "strict": false,
      "limit": "100kb"
    },
    "urlencoded": {
      "extended": true,
      "limit": "100kb"
    },
    "cors": {
      "origin": true,
      "credentials": true
    },
    "errorHandler": {
      "disableStackTrace": true,
      "disableStatusCode": true
    }
  }
}
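Separately from the config flags, note that an afterRemote hook like the one in the question only runs when confirm succeeds; when it fails (as with the USER_NOT_FOUND error above), LoopBack fires afterRemoteError hooks instead. A minimal sketch, assuming the user model is called MyUser and the hook is registered in a boot script (the file name and the message text are made up, and how the replaced error is then rendered depends on the errorHandler config above):
// server/boot/confirm-error-page.js (hypothetical file name)
module.exports = function (app) {
  var MyUser = app.models.MyUser; // assumes the user model from the question

  // afterRemote('confirm', ...) never fires on failure; afterRemoteError does.
  MyUser.afterRemoteError('confirm', function (ctx, next) {
    var friendly = new Error('Confirmation mail timed out or the link is no longer valid.');
    friendly.statusCode = 404;
    // Pass the friendlier error on; the configured error handler renders it
    // (without a stack trace, given the flags set above).
    next(friendly);
  });
};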

Related

Unable to migrate link where Link of type System.LinkTypes.Hierarchy-Forward

I used Azure DevOps Migration Tools to migrate all types of work items between two Azure DevOps projects in the same organization.
I searched a lot and went through all the documents, but I am not finding a robust solution for migrating parent-child link items to the destination project. I set the LinkMigrationSaveEachAsAdded flag to true and false, but it did not work.
My configuration file:
{
  "Version": "12.0",
  "LogLevel": "Verbose",
  "workaroundForQuerySOAPBugEnabled": false,
  "Source": {
    "$type": "TfsTeamProjectConfig",
    "Collection": "https://dev.azure.com/MyNameOrg/",
    "Project": "SourceProject",
    "ReflectedWorkItemIDFieldName": "TfsMigrationTool.ReflectedWorkItemId",
    "AuthenticationMode": "AccessToken",
    "AllowCrossProjectLinking": false,
    "PersonalAccessToken": "MyTokenHidden",
    "LanguageMaps": {
      "AreaPath": "Area",
      "IterationPath": "Iteration"
    }
  },
  "Target": {
    "$type": "TfsTeamProjectConfig",
    "Collection": "https://dev.azure.com/MyNameOrg/",
    "Project": "DestinationProject",
    "ReflectedWorkItemIDFieldName": "Custom.ReflectedWorkItemId",
    "AuthenticationMode": "AccessToken",
    "AllowCrossProjectLinking": false,
    "PersonalAccessToken": "MyTokenHidden",
    "LanguageMaps": {
      "AreaPath": "Area",
      "IterationPath": "Iteration"
    }
  },
  "FieldMaps": [
    {
      "$type": "TreeToTagMapConfig",
      "WorkItemTypeName": "*",
      "toSkip": 3,
      "timeTravel": 1
    }
  ],
  "WorkItemTypeDefinition": {
    "sourceWorkItemTypeName": "targetWorkItemTypeName"
  },
  "GitRepoMapping": null,
  "Processors": [
    {
      "$type": "WorkItemMigrationConfig",
      "Enabled": true,
      "ReplayRevisions": true,
      "PrefixProjectToNodes": false,
      "UpdateCreatedDate": true,
      "UpdateCreatedBy": true,
      "WIQLQueryBit": " AND [Custom.ReqID] contains 'CR' AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan')",
      "WIQLOrderBit": "[System.ChangedDate] desc",
      "LinkMigration": true,
      "AttachmentMigration": true,
      "AttachmentWorkingPath": "c:\\temp\\WorkItemAttachmentWorkingFolder\\",
      "FixHtmlAttachmentLinks": false,
      "SkipToFinalRevisedWorkItemType": false,
      "WorkItemCreateRetryLimit": 5,
      "FilterWorkItemsThatAlreadyExistInTarget": false,
      "PauseAfterEachWorkItem": false,
      "AttachmentMaxSize": 480000000,
      "AttachRevisionHistory": false,
      "LinkMigrationSaveEachAsAdded": true,
      "GenerateMigrationComment": false,
      "WorkItemIDs": null,
      "MaxRevisions": 0,
      "NodeStructureEnricherEnabled": true,
      "UseCommonNodeStructureEnricherConfig": false,
      "NodeBasePaths": [],
      "AreaMaps": {},
      "IterationMaps": {},
      "MaxGracefulFailures": 0
    }
  ]
}
[SKIP] Unable to migrate link where Link of type System.LinkTypes.Hierarchy-Forward where wiSourceL=182, wiSourceR=222, wiTargetL=189, wiTargetR=222 as target WI has not been migrated #1335
The configuration file in the question works for all types of migration, such as parent-to-child links and attachments (pdf, images, docs, xls, etc.).
The problem in the above file is this line:
"WIQLQueryBit": " AND [Custom.ReqID] contains 'CR' AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan')",
With this filter, the Custom.ReqID field must be present on every epic, feature, user story, bug, and task, and its value must start with 'CR'. Any work item that does not match the query is never migrated, so hierarchy links pointing to it cannot be created in the target and are skipped with the error shown above.

Strapi API not giving a response when registering a user even though it was updated in the admin panel

I am using Strapi with Postgres to register a new Strapi end user, and I am using the following code to send a POST request with the new user's credentials:
//...
try {
  // encrypt the user password
  const encryptedUserPassword = await bcrypt.hash(password, 10);
  const response = await axios.post(
    "http://localhost:1337/api/auth/local/register",
    {
      username,
      email: email.toLowerCase(),
      password: encryptedUserPassword,
    }
  );
} catch (err) {
  console.log(err);
  res.status(500).send({ message: ["Registration failed"], error: err });
}
// ...
The problem I am facing is that whenever I send the POST request, the data is successfully updated in the Strapi admin panel (and eventually in the Postgres database), but the request never returns a successful response; it keeps processing until it throws an Axios error, even though the data is updated in my Strapi admin panel.
This is the error I receive when I send the POST request to register a new user:
{
"message": [
"Registration failed"
],
"error": {
"message": "Request failed with status code 400",
"name": "AxiosError",
"config": {
"transitional": {
"silentJSONParsing": true,
"forcedJSONParsing": true,
"clarifyTimeoutError": false
},
"transformRequest": [
null
],
"transformResponse": [
null
],
"timeout": 0,
"xsrfCookieName": "XSRF-TOKEN",
"xsrfHeaderName": "X-XSRF-TOKEN",
"maxContentLength": -1,
"maxBodyLength": -1,
"env": {},
"headers": {
"Accept": "application/json, text/plain, */*",
"Content-Type": "application/json",
"User-Agent": "axios/0.27.2",
"Content-Length": 130
},
"method": "post",
"url": "http://localhost:1337/api/auth/local/register",
"data": "{\"username\":\"testuser04\",\"email\":\"testuser09#email.com\",\"password\":\"$2a$10$pqeADn.WL4BqHYpTonVl2.KYqoxtuJZyvdpgc659W90zmsu4Wo2jW\"}"
},
"code": "ERR_BAD_REQUEST",
"status": 400
}
}
I am using the Node version recommended for Strapi (14.19.3) with the following package.json dependencies:
"devDependencies": {},
"dependencies": {
"#strapi/strapi": "4.2.3",
"#strapi/plugin-users-permissions": "4.2.3",
"#strapi/plugin-i18n": "4.2.3",
"pg": "8.6.0"
},
Could someone please help me or give me some tips on what I am doing wrong? Thank you in advance
Well, I just resolved this problem. It turns out that I just needed to set the default value for email confirmation in the admin panel to false. I also needed to authorize my application to make requests directly to the API.
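For reference, a minimal sketch of the same register call once those settings are in place, plus logging that surfaces the actual reason behind a 400 (the err.response body). The URL and field names are taken from the question; sending the raw password is an assumption based on the users-permissions plugin hashing it on its own side, and the registerUser wrapper is a hypothetical helper:
const axios = require("axios");

async function registerUser(username, email, password) {
  try {
    const response = await axios.post(
      "http://localhost:1337/api/auth/local/register",
      {
        username,
        email: email.toLowerCase(),
        password, // raw password; Strapi hashes it on its side
      }
    );
    // On success the users-permissions plugin typically returns { jwt, user }.
    return response.data;
  } catch (err) {
    // For a 400, the reason (validation, permissions, etc.) is in the response body.
    console.log(err.response ? err.response.data : err.message);
    throw err;
  }
}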

Laravel Echo Server can not be authenticated, got HTTP status 500

I've installed both the Laravel Echo server and the Laravel Echo client.
The following is the laravel-echo-server.json configuration.
{
  "authHost": "http://taxation.com",
  "authEndpoint": "/broadcasting/auth",
  "clients": [
    {
      "appId": "APP_ID",
      "key": "someKey"
    }
  ],
  "database": "redis",
  "databaseConfig": {
    "redis": {},
    "sqlite": {
      "databasePath": "/database/laravel-echo-server.sqlite"
    }
  },
  "devMode": true,
  "host": "127.0.0.1",
  "port": "3000",
  "protocol": "http",
  "socketio": {},
  "sslCertPath": "",
  "sslKeyPath": "",
  "sslCertChainPath": "",
  "sslPassphrase": ""
}
The following script listens for channel events. It builds fine with npm run dev.
import Echo from 'laravel-echo'

let token = document.head.querySelector('meta[name="token"]');

if (token) {
  window.axios.defaults.headers.common['X-CSRF-TOKEN'] = token.content;
} else {
  console.error('CSRF token not found: https://laravel.com/docs/csrf#csrf-x-csrf-token');
}

window.Echo = new Echo({
  broadcaster: 'socket.io',
  host: '127.0.0.1:3000',
  reconnectionAttempts: 5
});

window.Echo.join('checked-in-1')
  .listen('.user.checked_in', (e) => {
    console.log(e);
  });
When I start the laravel-echo-server command and try to listen for any event, it keeps throwing Client can not be authenticated, got HTTP status 500.
Note:
I really didn't find anything helpful on laravel-echo-server or on Google.
Any help will be appreciated a lot.
Laravel v5.4
Thanks
The issue was just because of the CSRF token. I didn't pass the token to Echo:
window.Echo = new Echo({
  broadcaster: 'socket.io',
  host: '127.0.0.1:3000',
  reconnectionAttempts: 5,
  csrfToken: token.content // <-- pass the CSRF token here
});
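An alternative that is sometimes used instead of the csrfToken option is sending the token as an auth header, which laravel-echo-server forwards to the /broadcasting/auth endpoint. A sketch, reusing the token variable from the script above (treat the exact option shape as an assumption to verify against your Echo version):
window.Echo = new Echo({
  broadcaster: 'socket.io',
  host: '127.0.0.1:3000',
  reconnectionAttempts: 5,
  auth: {
    headers: {
      // forwarded with the auth request so Laravel can verify it
      'X-CSRF-TOKEN': token.content
    }
  }
});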

NightmareJS: how to set cookie?

var Nightmare = require('nightmare');
var nightmare = Nightmare({
  show: true
})

nightmare
  .goto('https://mail.yandex.ru')
  .type('input[name=login]', 'mylogin')
  .type('input[name=passwd]', 'mypassword')
  .click('button.nb-button._nb-action-button.nb-group-start')
  .wait('.mail-User-Name')
  .cookies.get()
  .then(function (cookies) {
    //actions
  })
I am getting the cookies after authorization, but I don't know where and how I should set them. I've tried calling .cookie.set() at the beginning, but this doesn't work.
How can I use the saved cookies? Thanks.
I did the following from the node terminal:
> var Nightmare = require('nightmare')
undefined
> var nightmare = Nightmare({show:true})
undefined
> nightmare.
... goto('https://google.com').
... cookies.set('foo', 'bar').
... cookies.get().
... then((cookies) => {
... console.log(JSON.stringify(cookies, null, 4))
... })
Promise { <pending> }
> [
{
"name": "NID",
"value": "96=qo1qY9LTKh1np4OSgiyJTi7e79-_OIoIuc71hnrKWvN1JUnDLJqZlE8u2ij_4mW0-JJhWOCafo5J0j-YkZCFt8H2VHzYUom4cfEd2QLOEsHmAcT2ACx4a5xSvO0SZGZp",
"domain": ".google.de",
"hostOnly": false,
"path": "/",
"secure": false,
"httpOnly": true,
"session": false,
"expirationDate": 1502733434.077271
},
{
"name": "CONSENT",
"value": "WP.25d07b",
"domain": ".google.de",
"hostOnly": false,
"path": "/",
"secure": false,
"httpOnly": false,
"session": false,
"expirationDate": 2145916800.077329
},
{
"name": "foo",
"value": "bar",
"domain": "www.google.de",
"hostOnly": true,
"path": "/",
"secure": false,
"httpOnly": false,
"session": true
}
]
nightmare.cookies.set('key', 'value') is indeed the correct way to use it, as you can see in my result object. Perhaps https://mail.yandex.ru does not accept your cookie, because it's invalid. Please do the same and edit your question to include your results.
Edit: Apparently, OP needs to store the cookies so he can use them in another Nightmare instance. This can be achieved like this:
var Nightmare = require('nightmare')

// First instance: load the site and grab its cookies.
var nightmare1 = Nightmare({show: true})

nightmare1
  .goto('https://google.com')
  .cookies.get()
  .then(function (storedCookies) {
    // storedCookies could be written to a file or database to make it permanent.

    // Second instance: replay the stored cookies, then load the page.
    var nightmare2 = Nightmare({show: true})
    for (var i = 0; i < storedCookies.length; i++) {
      nightmare2.cookies.set(storedCookies[i].name, storedCookies[i].value)
    }
    return nightmare2
      .goto('https://google.com')
      .then(function () {
        // nightmare2 is now using the cookies saved from nightmare1
      })
  })

Still unable to connect one OpenShift app to another OpenShift app's MongoDB database

After following the advice presented here:
(OpenShift) Sharing MongoDB between Apps not possible anymore?
...which in a nutshell is this -> "You need to create the application as scalable, then add MongoDB for it to work correctly. MongoDB will then go onto its own gear, with its own IP address and port number (remember to use this port number, or nothing will work)."
...I am still unable to successfully connect my Node.js app to the MongoDB database living on another Node.js app in OpenShift.
I have tried the following permutations of a host string that have been suggested:
localhost
128.xxx.xxx.xxx
[24 character hash]-myapp-mydomain.rhcloud.com
...all with the same result. No connection.
Here is the code that makes the call to set up the DB connection...
dbProvider = function(host, port, user, pass) {
  console.re.log("Attempting to create a DB instance...");
  this.db = new Db(process.env.OPENSHIFT_MONGODB_EXTDB_DBNAME, new Server(host, port, { auto_reconnect: true }, {}));
  console.re.log("created the DB instance...");
  if (this.db != null) {
    console.re.log("successfully connected to remote db %s at %s:%s", this.db, host, port);
    this.db.open(function(error, db) {
      if (error) {
        console.re.log("error %s attempting to open the db at %s:%s", error, host, port);
      }
      else {
        db.authenticate(user, pass, function(error, result) {
          if (error) {
            console.re.log("error %s attempting to connect to remote db at %s:%s using user %s", error, host, port, user);
          }
          else {
            console.re.log("successfully connected to remote db at %s:%s using user %s", host, port, user);
          }
        });
      }
    });
  }
  else {
    console.re.log("unable to connect to remote db at %s:%s", host, port);
  }
}
The above code fails in the callback from db.open(), with a null error object.
Upon inspecting the this.db object, we see the following:
{
"domain": null,
"_events": {},
"_maxListeners": 10,
"databaseName": "myDb",
"serverConfig": {
"domain": null,
"_events": {},
"_maxListeners": 10,
"host": "xxxxxxxxxxxxxxxxxxxxxxxx-myApp-myDomain.rhcloud.com",
"port": 12345,
"options": {
"auto_reconnect": true
},
"internalMaster": false,
"connected": false,
"poolSize": 5,
"disableDriverBSONSizeCheck": false,
"ssl": false,
"_used": true,
"_readPreference": null,
"socketOptions": {},
"logger": {
"error": "[Function]",
"log": "[Function]",
"debug": "[Function]"
},
"eventHandlers": {
"error": [],
"parseError": [],
"poolReady": [],
"message": [],
"close": [],
"timeout": []
},
"_serverState": "disconnected",
"_state": {
"runtimeStats": {
"queryStats": {
"m_n": 0,
"m_oldM": 0,
"m_oldS": 0,
"m_newM": 0,
"m_newS": 0,
"numDataValues": 0,
"mean": 0,
"variance": 0,
"standardDeviation": 0,
"sScore": 0
}
}
},
"recordQueryStats": false
},
"options": {},
"_applicationClosed": false,
"bsonLib": {
"Code": "[Function]",
"Symbol": "[Function]",
"BSON": "[Function]",
"DBRef": "[Function]",
"Binary": "[Function]",
"ObjectID": "[Function]",
"Long": "[Function]",
"Timestamp": "[Function]",
"Double": "[Function]",
"MinKey": "[Function]",
"MaxKey": "[Function]"
},
"bson": {},
"bson_deserializer": "[ Circular {bsonLib} ]",
"bson_serializer": "[ Circular {bsonLib} ]",
"_state": "disconnected",
"pkFactory": "[Function]",
"forceServerObjectId": false,
"safe": false,
"notReplied": {},
"isInitializing": true,
"auths": [],
"openCalled": false,
"commands": [],
"_callBackStore": {
"domain": null,
"_events": {},
"_maxListeners": 10,
"_notReplied": {}
},
"logger": "[ Circular {logger} ]",
"slaveOk": false,
"tag": 1460656848883,
"eventHandlers": {
"error": [],
"parseError": [],
"poolReady": [],
"message": [],
"close": []
},
"serializeFunctions": false,
"raw": false,
"recordQueryStats": false,
"reaperEnabled": false,
"_lastReaperTimestamp": 1460656848883,
"retryMiliSeconds": 1000,
"numberOfRetries": 60,
"reaperInterval": 10000,
"reaperTimeout": 30000
}
Note: The host, port, and databaseName fields have been modified for privacy.
This db object differs from a properly connecting db object in only two fields:
in a working connection, both db.serverConfig._serverState and db._state are 'connected' instead of 'disconnected'.
I would appreciate hearing from anyone who has successfully accomplished what I am trying to do.
Thanks!
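For comparison, here is a sketch of the same connection done with MongoClient and a full connection string (the driver API of that era). Only OPENSHIFT_MONGODB_EXTDB_DBNAME appears in my code above; the other OPENSHIFT_MONGODB_EXTDB_* names below are hypothetical custom environment variables holding the remote gear's host, port, and credentials:
var MongoClient = require('mongodb').MongoClient;

// Hypothetical custom env vars pointing at the other app's MongoDB gear.
var url = 'mongodb://' +
  process.env.OPENSHIFT_MONGODB_EXTDB_USERNAME + ':' +
  process.env.OPENSHIFT_MONGODB_EXTDB_PASSWORD + '@' +
  process.env.OPENSHIFT_MONGODB_EXTDB_HOST + ':' +
  process.env.OPENSHIFT_MONGODB_EXTDB_PORT + '/' +
  process.env.OPENSHIFT_MONGODB_EXTDB_DBNAME;

MongoClient.connect(url, function (err, db) {
  if (err) {
    console.log('connect failed: %s', err);
    return;
  }
  console.log('connected to %s', db.databaseName);
  db.close();
});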
