I'm trying to map a local directory to a folder on my hosting provider, but I get a connection timeout error.
I've tried logging in from different ISPs, but I still get the same error, and the same credentials work fine in FileZilla.
Here is my config file:
{
    // sftp, ftp or ftps
    "type": "sftp",
    "save_before_upload": true,
    "upload_on_save": false,
    "sync_down_on_open": false,
    "sync_skip_deletes": false,
    "sync_same_age": true,
    "confirm_downloads": false,
    "confirm_sync": true,
    "confirm_overwrite_newer": false,
    "host": "mydomain.com",
    "user": "myusername",
    "password": "mypassword",
    // "port": "22",
    "remote_path": "/",
    "ignore_regexes": [
        "\\.sublime-(project|workspace)", "sftp-config(-alt\\d?)?\\.json",
        "sftp-settings\\.json", "/venv/", "\\.svn/", "\\.hg/", "\\.git/",
        "\\.bzr", "_darcs", "CVS", "\\.DS_Store", "Thumbs\\.db", "desktop\\.ini"
    ],
    //"file_permissions": "664",
    //"dir_permissions": "775",
    //"extra_list_connections": 0,
    "connect_timeout": 30,
    //"keepalive": 120,
    //"ftp_passive_mode": true,
    //"ftp_obey_passive_host": false,
    //"ssh_key_file": "~/.ssh/id_rsa",
    //"sftp_flags": ["-F", "/path/to/ssh_config"],
    //"preserve_modification_times": false,
    //"remote_time_offset_in_hours": 0,
    //"remote_encoding": "utf-8",
    //"remote_locale": "C",
    //"allow_config_upload": false,
}
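For what it's worth, a quick way to see whether the timeout is a network or firewall issue rather than a Sublime SFTP issue is to test raw TCP reachability of the SFTP port. A minimal sketch in Node.js, assuming the host and port implied by the config above (mydomain.com, port 22):

const net = require('net');

// Try to open a plain TCP connection to the SFTP port; the 30000 ms timeout
// matches the connect_timeout in the config above.
const socket = net.connect({ host: 'mydomain.com', port: 22, timeout: 30000 });

socket.on('connect', () => {
    console.log('TCP connection to port 22 succeeded');
    socket.end();
});
socket.on('timeout', () => {
    console.log('TCP connection timed out');
    socket.destroy();
});
socket.on('error', (err) => console.error('TCP connection failed:', err.message));

If this times out from the networks you tested while FileZilla works, note that FileZilla may be connecting over plain FTP (port 21) rather than SFTP (port 22).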
I used Azure DevOps Migration Tools to migrate all types of work items between two Azure DevOps projects in the same organization.
I have searched a lot and gone through all of the documentation, but I have not found a robust solution for migrating parent-child links to the destination project. I tried setting the LinkMigrationSaveEachAsAdded flag to both true and false, but it did not work.
My configuration file:
{
"Version": "12.0",
"LogLevel": "Verbose",
"workaroundForQuerySOAPBugEnabled": false,
"Source": {
"$type": "TfsTeamProjectConfig",
"Collection": "https://dev.azure.com/MyNameOrg/",
"Project": "SourceProject",
"ReflectedWorkItemIDFieldName": "TfsMigrationTool.ReflectedWorkItemId",
"AuthenticationMode": "AccessToken",
"AllowCrossProjectLinking": false,
"PersonalAccessToken": "MyTokenHidden",
"LanguageMaps": {
"AreaPath": "Area",
"IterationPath": "Iteration"
}
},
"Target": {
"$type": "TfsTeamProjectConfig",
"Collection": "https://dev.azure.com/MyNameOrg/",
"Project": "DestinationProject",
"ReflectedWorkItemIDFieldName": "Custom.ReflectedWorkItemId",
"AuthenticationMode": "AccessToken",
"AllowCrossProjectLinking": false,
"PersonalAccessToken": "MyTokenHidden",
"LanguageMaps": {
"AreaPath": "Area",
"IterationPath": "Iteration"
}
},
"FieldMaps": [
{
"$type": "TreeToTagMapConfig",
"WorkItemTypeName": "*",
"toSkip": 3,
"timeTravel": 1
}
],
"WorkItemTypeDefinition": {
"sourceWorkItemTypeName": "targetWorkItemTypeName"
},
"GitRepoMapping": null,
"Processors": [
{
"$type": "WorkItemMigrationConfig",
"Enabled": true,
"ReplayRevisions": true,
"PrefixProjectToNodes": false,
"UpdateCreatedDate": true,
"UpdateCreatedBy": true,
"WIQLQueryBit": " AND [Custom.ReqID] contains 'CR' AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan')",
"WIQLOrderBit": "[System.ChangedDate] desc",
"LinkMigration": true,
"AttachmentMigration": true,
"AttachmentWorkingPath": "c:\\temp\\WorkItemAttachmentWorkingFolder\\",
"FixHtmlAttachmentLinks": false,
"SkipToFinalRevisedWorkItemType": false,
"WorkItemCreateRetryLimit": 5,
"FilterWorkItemsThatAlreadyExistInTarget": false,
"PauseAfterEachWorkItem": false,
"AttachmentMaxSize": 480000000,
"AttachRevisionHistory": false,
"LinkMigrationSaveEachAsAdded": true,
"GenerateMigrationComment": false,
"WorkItemIDs": null,
"MaxRevisions": 0,
"NodeStructureEnricherEnabled": true,
"UseCommonNodeStructureEnricherConfig": false,
"NodeBasePaths": [],
"AreaMaps": {},
"IterationMaps": {},
"MaxGracefulFailures": 0
}
]
}
During the migration I get this message:
[SKIP] Unable to migrate link where Link of type System.LinkTypes.Hierarchy-Forward where wiSourceL=182, wiSourceR=222, wiTargetL=189, wiTargetR=222 as target WI has not been migrated #1335
Given the configuration file in the question, it works for all types of migration, such as parent-child links and attachments (PDF, images, docs, XLS, and so on).
The problem in the above file is on this line:
"WIQLQueryBit": " AND [Custom.ReqID] contains 'CR' AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan')",
The Custom.ReqID field must be present on every epic, feature, user story, bug, and task, and its value must start with 'CR'; otherwise those work items are filtered out of the WIQL query, are never migrated, and any parent-child links pointing at them get skipped.
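As a quick sanity check (a sketch, with only the query changed and everything else in the WorkItemMigrationConfig processor kept as above), you can temporarily widen WIQLQueryBit so that items are selected regardless of Custom.ReqID; if the [SKIP] messages disappear, the filter is what was excluding the link targets:

{
    "$type": "WorkItemMigrationConfig",
    "Enabled": true,
    "WIQLQueryBit": " AND [System.WorkItemType] NOT IN ('Test Suite', 'Test Plan')",
    "WIQLOrderBit": "[System.ChangedDate] desc",
    "LinkMigration": true
}

If that run migrates the hierarchy links, populate Custom.ReqID (starting with 'CR') on the missing parents or children in the source project and restore the original filter.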
I have an Angular 12 application. When I run ng lint --fix, the linter works.
In our company we also use an internal library that provides some reusable components.
When that library's version is 3.0.0, the linter works.
When I set the version in my package.json to 3.1.0, because some bug fixes were made in the meantime, I get an error when I try to lint my application.
The error is:
[error] FatalError: Failed to load C:\xecm-test-two\xecm\xecm-webapp-angular\src\tslint.json: Invalid "extends" configuration value - could not require "tslint-angular". Review the Node lookup algorithm (https://nodejs.org/api/modules.html#modules_all_together) for the approximate method TSLint uses to find the referenced configuration file.
at new FatalError (C:\xecm-test-two\xecm\xecm-webapp-angular\node_modules\tslint\lib\error.js:28:28)
at Object.findConfiguration (C:\xecm-test-two\xecm\xecm-webapp-angular\node_modules\tslint\lib\configuration.js:58:15)
at _lint (C:\xecm-test-two\xecm\xecm-webapp-angular\node_modules\@angular-devkit\build-angular\src\tslint\index.js:151:40)
at _run (C:\xecm-test-two\xecm\xecm-webapp-angular\node_modules\@angular-devkit\build-angular\src\tslint\index.js:70:35)
The folder structure is xecm-test-two => xecm => xecm-webapp-angular (this is the Angular 12 app).
My tslint.json in the src folder looks like this:
{
"extends": "../tslint.json",
"rules": {
"directive-selector": [
true,
"attribute",
"xecm",
"camelCase"
],
"component-selector": [
true,
"element",
"xecm",
"kebab-case"
]
}
}
and the tslint.json it extends (one level up, in the project root) looks like this:
{
"extends": "company-linter-tslint/tslint",
"rules": {
"prettier": false,
"arrow-return-shorthand": true,
"callable-types": true,
"class-name": true,
"comment-format": [
true,
"check-space"
],
"curly": true,
"deprecation": {
"severity": "warn"
},
"eofline": true,
"forin": true,
"import-blacklist": [
true,
"rxjs/Rx"
],
"import-spacing": true,
"ter-indent": [
true,
4,
{
"SwitchCase": 1
}
],
"indent": [
true,
"spaces",
4
],
"interface-over-type-literal": true,
"label-position": true,
"max-line-length": [
true,
250
],
"member-access": false,
"member-ordering": [
true,
{
"order": [
"static-field",
"instance-field",
"static-method",
"instance-method"
]
}
],
"no-arg": true,
"no-bitwise": true,
"no-console": [
true,
"debug",
"info",
"time",
"timeEnd",
"trace"
],
"no-construct": true,
"no-debugger": true,
"no-duplicate-super": true,
"no-empty": false,
"no-empty-interface": true,
"no-eval": true,
"no-inferrable-types": [
true,
"ignore-params"
],
"no-misused-new": true,
"no-non-null-assertion": true,
"no-redundant-jsdoc": true,
"no-shadowed-variable": true,
"no-string-literal": false,
"no-string-throw": true,
"no-switch-case-fall-through": true,
"no-trailing-whitespace": true,
"no-unnecessary-initializer": true,
"no-unused-expression": true,
"no-var-keyword": true,
"object-literal-sort-keys": false,
"one-line": [
true,
"check-open-brace",
"check-catch",
"check-else",
"check-whitespace"
],
"prefer-const": true,
"quotemark": [
true,
"single"
],
"radix": true,
"semicolon": [
true,
"always"
],
"triple-equals": [
true,
"allow-null-check"
],
"typedef-whitespace": [
true,
{
"call-signature": "nospace",
"index-signature": "nospace",
"parameter": "nospace",
"property-declaration": "nospace",
"variable-declaration": "nospace"
}
],
"unified-signatures": true,
"variable-name": false,
"whitespace": [
true,
"check-branch",
"check-decl",
"check-operator",
"check-separator",
"check-type"
],
"no-output-on-prefix": true,
"no-input-rename": true,
"no-output-rename": true,
"use-life-cycle-interface": true,
"use-pipe-transform-interface": true,
"component-class-suffix": true,
"directive-class-suffix": true,
"max-classes-per-file": [true, 5]
}
}
I really can't understand why I get this error only after bumping the version of my company's library.
How can I debug this and find out where the error comes from?
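Since the fatal error says TSLint could not require tslint-angular (presumably pulled in somewhere down the company-linter-tslint extends chain), one way to narrow it down is to check whether that package is still resolvable from the app and whether 3.1.0 still declares it. A minimal sketch, run with node from the xecm-webapp-angular folder; the assumption that company-linter-tslint used to depend on tslint-angular is mine:

// check-tslint-deps.js - run with: node check-tslint-deps.js
const fs = require('fs');
const path = require('path');

// 1) Can tslint-angular be resolved from this project at all?
try {
    console.log('tslint-angular resolves to:', require.resolve('tslint-angular'));
} catch (err) {
    console.error('tslint-angular NOT resolvable:', err.message);
}

// 2) Does the installed company package still list it as a dependency?
const pkgPath = path.join(__dirname, 'node_modules', 'company-linter-tslint', 'package.json');
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
console.log('company-linter-tslint version:', pkg.version);
console.log('dependencies:', pkg.dependencies);
console.log('peerDependencies:', pkg.peerDependencies);

If tslint-angular disappeared from node_modules after the bump (for example because 3.1.0 moved it to peerDependencies), installing it explicitly in the app should make the extends chain resolvable again.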
I have code like this:
// inside an async function
let item = { name: 'Roger' }
try {
    return await collection.save(item)
}
catch (err) {
    // err is the ArangoError shown below
}
The collection I'm saving to has a unique index on the field called name. During exception handling, the err object looks something like this:
{
"isArangoError": true,
"response": {
"_readableState": {
"objectMode": false,
"highWaterMark": 16384,
"buffer": {
"head": null,
"tail": null,
"length": 0
},
"length": 0,
"pipes": [
],
"flowing": true,
"ended": true,
"endEmitted": true,
"reading": false,
"sync": true,
"needReadable": false,
"emittedReadable": false,
"readableListening": false,
"resumeScheduled": false,
"paused": false,
"errorEmitted": false,
"emitClose": true,
"autoDestroy": false,
"destroyed": false,
"defaultEncoding": "utf8",
"awaitDrainWriters": null,
"multiAwaitDrain": false,
"readingMore": true,
"decoder": null,
"encoding": null
},
"body": {
"code": 409,
"error": true,
"errorMessage": "unique constraint violated - in index name_is_unique of type persistent over 'name'; conflicting key: 15816187",
"errorNum": 1210
},
"arangojsHostId": 0
},
"statusCode": 409,
"errorNum": 1210,
"code": 409
}
While the error message is indeed helpful in conveying that name must be unique, I wish there were an attribute/field in the error object from which to grab the name of the conflicting field.
How can I get the field name other than by regex-parsing the error message?
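The structured fields on the error only carry errorNum (1210, unique constraint violated) and the HTTP status; the offending field name appears only inside errorMessage. One workaround sketch, assuming arangojs's collection.indexes() call and a collection with a single unique secondary index, reads the field names from the index metadata instead of the message:

// Sketch: resolve the conflicting field(s) from index metadata rather than
// from the error text. With several unique indexes you would still need the
// index name from the message to pick the right one.
async function saveItem(collection, item) {
    try {
        return await collection.save(item);
    } catch (err) {
        if (err.isArangoError && err.errorNum === 1210) {
            const indexes = await collection.indexes();
            const uniqueFields = indexes
                .filter((idx) => idx.unique && idx.type !== 'primary')
                .flatMap((idx) => idx.fields);
            console.log('unique constraint violated on:', uniqueFields); // e.g. [ 'name' ]
        }
        throw err;
    }
}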
I have an AWS Redis instance which stores some data. I want to get the stored data using the Redis endpoint URL and Node.js. Below are my code and the response I am getting.
Code:
let client = require('redis').createClient('redis://abcbsamv:6379', {no_ready_check: true});
res.send(client)
Response from the above code:
{
"_events": {},
"_eventsCount": 1,
"address": "endpoints:6379",
"connection_options": {
"port": 6379,
"host": "endpoints",
"family": 4
},
"connection_id": 0,
"connected": false,
"ready": false,
"should_buffer": false,
"max_attempts": 0,
"command_queue": [],
"offline_queue": [],
"pipeline_queue": [],
"connect_timeout": 3600000,
"enable_offline_queue": true,
"retry_max_delay": null,
"retry_timer": null,
"retry_totaltime": 0,
"retry_delay": 200,
"retry_backoff": 1.7,
"attempts": 1,
"pub_sub_mode": 0,
"subscription_set": {},
"monitoring": false,
"message_buffers": false,
"closing": false,
"server_info": {},
"old_state": null,
"fire_strings": true,
"pipeline": false,
"sub_commands_left": 0,
"times_connected": 0,
"buffers": false,
"options": {
"port": "6379"
    }
}
Please help me.
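The object shown above is just the client instance itself; res.send(client) serializes the client before it has even connected, which is why connected is false and server_info is empty. A minimal sketch of actually reading a value, assuming the node_redis v3 callback API (which the dumped fields suggest) and an Express route; the /value/:key route and the redis://abcbsamv:6379 endpoint are placeholders:

const express = require('express');
const redis = require('redis');

const app = express();

// Placeholder endpoint URL from the question.
const client = redis.createClient('redis://abcbsamv:6379', { no_ready_check: true });
client.on('error', (err) => console.error('redis error:', err));

app.get('/value/:key', (req, res) => {
    // Fetch the stored value and send that, not the client object.
    client.get(req.params.key, (err, value) => {
        if (err) return res.status(500).send(err.message);
        res.send(value);
    });
});

app.listen(3000);

With the newer redis v4 client the equivalent calls would be await client.connect() and const value = await client.get(key).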
After following the advice presented here:
(OpenShift) Sharing MongoDB between Apps not possible anymore?
...which in a nutshell is this -> "You need to create the application as scalable, then add MongoDB for it to work correctly. MongoDB will then go onto it's own gear, with it's own ip address and port number (remember to use this port number, or nothing will work)."
...I am still unable to successfully connect my Node.js app to the MongoDB database living on another Node.js app in OpenShift.
I have tried the following suggested permutations of the host string:
localhost
128.xxx.xxx.xxx
[24 character hash]-myapp-mydomain.rhcloud.com
...all with the same result. No connection.
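For reference, a minimal sketch of how the host and port could be fed in, assuming the MongoDB gear's OPENSHIFT_MONGODB_DB_HOST, OPENSHIFT_MONGODB_DB_PORT, OPENSHIFT_MONGODB_DB_USERNAME and OPENSHIFT_MONGODB_DB_PASSWORD values (read on the gear that owns the database) have been copied into custom environment variables on the connecting app, e.g. with rhc env set; every OPENSHIFT_MONGODB_EXTDB_* name below except OPENSHIFT_MONGODB_EXTDB_DBNAME (already used in the code that follows) is hypothetical:

// Hypothetical custom env vars mirroring the scalable MongoDB gear's own values.
var host = process.env.OPENSHIFT_MONGODB_EXTDB_HOST;               // e.g. <hash>-myapp-mydomain.rhcloud.com
var port = parseInt(process.env.OPENSHIFT_MONGODB_EXTDB_PORT, 10); // the gear's exposed port, not necessarily 27017
var user = process.env.OPENSHIFT_MONGODB_EXTDB_USERNAME;
var pass = process.env.OPENSHIFT_MONGODB_EXTDB_PASSWORD;

new dbProvider(host, port, user, pass); // dbProvider is defined in the code below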
Here is the code that makes the call to set up the db connection:
// Legacy mongodb native driver constructors used below
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;

dbProvider = function(host, port, user, pass) {
    console.re.log("Attempting to create a DB instance...");
    this.db = new Db(process.env.OPENSHIFT_MONGODB_EXTDB_DBNAME, new Server(host, port, { auto_reconnect: true }, {}));
    console.re.log("created the DB instance...");
    if (this.db != null) {
        console.re.log("successfully connected to remote db %s at %s:%s", this.db, host, port);
        this.db.open(function(error, db) {
            if (error) {
                console.re.log("error %s attempting to open the db at %s:%s", error, host, port);
            }
            else {
                db.authenticate(user, pass, function(error, result) {
                    if (error) {
                        console.re.log("error %s attempting to connect to remote db at %s:%s using user %s", error, host, port, user);
                    }
                    else {
                        console.re.log("successfully connected to remote db at %s:%s using user %s", host, port, user);
                    }
                });
            }
        });
    }
    else {
        console.re.log("unable to connect to remote db at %s:%s", host, port);
    }
}
The above code fails at the callback from db.open(): the error object passed to the callback is null, yet the connection never succeeds.
Upon inspecting the this.db object, we see the following:
{
"domain": null,
"_events": {},
"_maxListeners": 10,
"databaseName": "myDb",
"serverConfig": {
"domain": null,
"_events": {},
"_maxListeners": 10,
"host": "xxxxxxxxxxxxxxxxxxxxxxxx-myApp-myDomain.rhcloud.com",
"port": 12345,
"options": {
"auto_reconnect": true
},
"internalMaster": false,
"connected": false,
"poolSize": 5,
"disableDriverBSONSizeCheck": false,
"ssl": false,
"_used": true,
"_readPreference": null,
"socketOptions": {},
"logger": {
"error": "[Function]",
"log": "[Function]",
"debug": "[Function]"
},
"eventHandlers": {
"error": [],
"parseError": [],
"poolReady": [],
"message": [],
"close": [],
"timeout": []
},
"_serverState": "disconnected",
"_state": {
"runtimeStats": {
"queryStats": {
"m_n": 0,
"m_oldM": 0,
"m_oldS": 0,
"m_newM": 0,
"m_newS": 0,
"numDataValues": 0,
"mean": 0,
"variance": 0,
"standardDeviation": 0,
"sScore": 0
}
}
},
"recordQueryStats": false
},
"options": {},
"_applicationClosed": false,
"bsonLib": {
"Code": "[Function]",
"Symbol": "[Function]",
"BSON": "[Function]",
"DBRef": "[Function]",
"Binary": "[Function]",
"ObjectID": "[Function]",
"Long": "[Function]",
"Timestamp": "[Function]",
"Double": "[Function]",
"MinKey": "[Function]",
"MaxKey": "[Function]"
},
"bson": {},
"bson_deserializer": "[ Circular {bsonLib} ]",
"bson_serializer": "[ Circular {bsonLib} ]",
"_state": "disconnected",
"pkFactory": "[Function]",
"forceServerObjectId": false,
"safe": false,
"notReplied": {},
"isInitializing": true,
"auths": [],
"openCalled": false,
"commands": [],
"_callBackStore": {
"domain": null,
"_events": {},
"_maxListeners": 10,
"_notReplied": {}
},
"logger": "[ Circular {logger} ]",
"slaveOk": false,
"tag": 1460656848883,
"eventHandlers": {
"error": [],
"parseError": [],
"poolReady": [],
"message": [],
"close": []
},
"serializeFunctions": false,
"raw": false,
"recordQueryStats": false,
"reaperEnabled": false,
"_lastReaperTimestamp": 1460656848883,
"retryMiliSeconds": 1000,
"numberOfRetries": 60,
"reaperInterval": 10000,
"reaperTimeout": 30000
}
Note: The host, port, and databaseName fields have been modified for privacy.
This db object differs from a properly connected db object in only two fields: both db.serverConfig._serverState and db._state read 'disconnected' here instead of 'connected'.
I'd appreciate hearing from anyone who has successfully accomplished what I am trying to do.
Thanks!