SSL error accessing Azure datastore for Azure AutoML - python-3.x
I am implementing an Azure AutoML dashboard in a Docker container.
When I run the code outside Docker it works, but inside the container it fails with an SSL error.
from azureml.core import Dataset

def upload_dataset_to_blob(ws):
    # Upload the CSV files to the workspace's default datastore.
    datastore = ws.get_default_datastore()
    datastore.upload_files(files=['/usr/src/mediafiles/train.csv'],
                           target_path='beeeerrr-dataset/tabular/',
                           overwrite=True, show_progress=True)
    datastore.upload_files(files=['/usr/src/mediafiles/valid.csv'],
                           target_path='beeeerrr-dataset/tabular/',
                           overwrite=True, show_progress=True)
    datastore.upload_files(files=['/usr/src/mediafiles/test.csv'],
                           target_path='beeeerrr-dataset/tabular/',
                           overwrite=True, show_progress=True)
    # Create tabular datasets that point back at the uploaded files.
    train_dataset = Dataset.Tabular.from_delimited_files(
        validate=False,
        path=[(datastore, 'beeeerrr-dataset/tabular/train.csv')])
    valid_dataset = Dataset.Tabular.from_delimited_files(
        validate=False,
        path=[(datastore, 'beeeerrr-dataset/tabular/valid.csv')])
    test_dataset = Dataset.Tabular.from_delimited_files(
        path=[(datastore, 'beeeerrr-dataset/tabular/test.csv')])
    return train_dataset, valid_dataset, test_dataset
This is the error I am getting:
app_1 | Uploading an estimated of 1 files
app_1 | Uploading /usr/src/mediafiles/train.csv
app_1 | Uploaded /usr/src/mediafiles/train.csv, 1 files out of an estimated total of 1
app_1 | Uploaded 1 files
app_1 | Uploading an estimated of 1 files
app_1 | Uploading /usr/src/mediafiles/valid.csv
app_1 | Uploaded /usr/src/mediafiles/valid.csv, 1 files out of an estimated total of 1
app_1 | Uploaded 1 files
app_1 | Uploading an estimated of 1 files
app_1 | Uploading /usr/src/mediafiles/test.csv
app_1 | Uploaded /usr/src/mediafiles/test.csv, 1 files out of an estimated total of 1
app_1 | Uploaded 1 files
app_1 | <bound method DataReference._get_normalized_path of $AZUREML_DATAREFERENCE_blob_test_data>
app_1 | Internal Server Error: /azureml/train/
app_1 | Traceback (most recent call last):
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/data/dataset_error_handling.py", line 65, in _validate_has_data
app_1 | dataflow.verify_has_data()
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/_loggerfactory.py", line 206, in wrapper
app_1 | return func(*args, **kwargs)
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/dataflow.py", line 875, in verify_has_data
app_1 | if len(self.take(1)._to_pyrecords()) == 0:
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/dataflow.py", line 792, in _to_pyrecords
app_1 | self._engine_api.execute_anonymous_activity(
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/_aml_helper.py", line 38, in wrapper
app_1 | return send_message_func(op_code, message, cancellation_token)
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/engineapi/api.py", line 120, in execute_anonymous_activity
app_1 | response = self._message_channel.send_message('Engine.ExecuteActivity', message_args, cancellation_token)
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/engineapi/engine.py", line 291, in send_message
app_1 | raise_engine_error(response['error'])
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/dataprep/api/errorhandlers.py", line 10, in raise_engine_error
app_1 | raise ExecutionError(error_response)
app_1 | azureml.dataprep.api.errorhandlers.ExecutionError:
app_1 | Error Code: ScriptExecution.DatastoreResolution.Unexpected
app_1 | Failed Step: XXXXXXXXXXXXXXXXXXXXXXXXX
app_1 | Error Message: ScriptExecutionException was caused by DatastoreResolutionException.
app_1 | DatastoreResolutionException was caused by UnexpectedException.
app_1 | Unexpected failure making request to fetching info for Datastore 'workspaceblobstore' in subscription: 'XXXXXXXXXXXXXXXXXXXXXXXXX', resource group: 'django-env', workspace: 'ml-demo-main'. Using base service url: https://centralus.experiments.azureml.net. HResult: 0x80131501.
app_1 | The SSL connection could not be established, see inner exception.
app_1 | | session_id=XXXXXXXXXXXXXXXXXXXXXXXXX
app_1 |
app_1 | During handling of the above exception, another exception occurred:
app_1 |
app_1 | Traceback (most recent call last):
app_1 | File "/usr/local/lib/python3.8/site-packages/django/core/handlers/exception.py", line 47, in inner
app_1 | response = get_response(request)
app_1 | File "/usr/local/lib/python3.8/site-packages/django/core/handlers/base.py", line 181, in _get_response
app_1 | response = wrapped_callback(request, *callback_args, **callback_kwargs)
app_1 | File "/usr/local/lib/python3.8/site-packages/django/views/generic/base.py", line 70, in view
app_1 | return self.dispatch(request, *args, **kwargs)
app_1 | File "/usr/local/lib/python3.8/site-packages/django/views/generic/base.py", line 98, in dispatch
app_1 | return handler(request, *args, **kwargs)
app_1 | File "/usr/src/app/azure_ml/views.py", line 50, in get
app_1 | azureml_train1()
app_1 | File "/usr/src/app/azure_ml/rough.py", line 39, in azureml_train1
app_1 | train_dataset, valid_dataset, test_dataset = upload_dataset_to_blob(ws)
app_1 | File "/usr/src/app/utils/azure_ml/dataset.py", line 28, in upload_dataset_to_blob
app_1 | train_dataset = Dataset.Tabular.from_delimited_files(
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/data/_loggerfactory.py", line 126, in wrapper
app_1 | return func(*args, **kwargs)
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/data/dataset_factory.py", line 322, in from_delimited_files
app_1 | dataflow = _transform_and_validate(
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/data/dataset_factory.py", line 966, in _transform_and_validate
app_1 | _validate_has_data(dataflow, 'Cannot load any data from the specified path. '
app_1 | File "/usr/local/lib/python3.8/site-packages/azureml/data/dataset_error_handling.py", line 68, in _validate_has_data
app_1 | raise DatasetValidationError(error_message + '\n' + e.compliant_message, exception=e)
app_1 | azureml.data.dataset_error_handling.DatasetValidationError: DatasetValidationError:
app_1 | Message: Cannot load any data from the specified path. Make sure the path is accessible and contains data.
app_1 | ScriptExecutionException was caused by DatastoreResolutionException.
app_1 | DatastoreResolutionException was caused by UnexpectedException.
app_1 | Unexpected failure making request to fetching info for Datastore 'workspaceblobstore' in subscription: 'XXXXXXXXXXXXXXXXXXXXXXXXX', resource group: 'django-env', workspace: 'ml-demo-main'. Using base service url: https://centralus.experiments.azureml.net. HResult: 0x80131501.
app_1 | Failed due to inner exception of type: HttpRequestException
app_1 | | session_id=XXXXXXXXXXXXXXXXXXXXXXXXX
app_1 | InnerException None
app_1 | ErrorResponse
app_1 | {
app_1 | "error": {
app_1 | "code": "UserError",
app_1 | "message": "Cannot load any data from the specified path. Make sure the path is accessible and contains data.\nScriptExecutionException was caused by DatastoreResolutionException.\n DatastoreResolutionException was caused by UnexpectedException.\n Unexpected failure making request to fetching info for Datastore 'workspaceblobstore' in subscription: 'XXXXXXXXXXXXXXXXXXXXXXXXX', resource group: 'django-env', workspace: 'ml-demo-main'. Using base service url: https://centralus.experiments.azureml.net. HResult: 0x80131501.\n Failed due to inner exception of type: HttpRequestException\n| session_id=XXXXXXXXXXXXXXXXXXXXXXXXX"
app_1 | }
app_1 | }
The files upload successfully, but the error is raised when the datasets are created from the uploaded files.
I am using a Service Principal for authentication:
from azureml.core import Workspace
from azureml.core.authentication import ServicePrincipalAuthentication

def get_workspace():
    # `settings` is the Django settings module holding the credentials.
    svr_pr = ServicePrincipalAuthentication(
        tenant_id=settings.TENANT_ID,
        service_principal_id=settings.SERVICE_PRINCIPAL_ID,
        service_principal_password=settings.SERVICE_PRINCIPAL_PASSWORD
    )
    ws = Workspace(
        subscription_id=settings.SUBSCRIPTION_ID,
        resource_group=settings.RESOURCE_GROUP,
        workspace_name=settings.WORKSPACE_NAME,
        auth=svr_pr
    )
    return ws
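For context, this is roughly how the two helpers are wired together (a minimal sketch; the function name azureml_train1 comes from the traceback, everything else is illustrative):

def azureml_train1():
    # Authenticate with the Service Principal, then upload and register the data.
    ws = get_workspace()
    train_dataset, valid_dataset, test_dataset = upload_dataset_to_blob(ws)
    # ... the AutoML training run would be configured and submitted from here ...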
I faced the same problem, and after some debugging, I found the following in the log files:
Failed to retrieve datastore with exception: System.Net.Http.HttpRequestException: The SSL connection could not be established, see inner exception.
---> System.Security.Authentication.AuthenticationException: Authentication failed, see inner exception.
---> System.TypeInitializationException: The type initializer for 'SslMethods' threw an exception.
---> System.TypeInitializationException: The type initializer for 'Ssl' threw an exception.
---> System.TypeInitializationException: The type initializer for 'SslInitializer' threw an exception.
---> Interop+Crypto+OpenSslCryptographicException: error:0E076071:configuration file routines:MODULE_RUN:unknown module name
at Interop.SslInitializer..cctor()
--- End of inner exception stack trace ---
at Interop.Ssl..cctor()
--- End of inner exception stack trace ---
at Interop.Ssl.SslV2_3Method()
at Interop.Ssl.SslMethods..cctor()
--- End of inner exception stack trace ---
This pointed me to this thread: https://github.com/dotnet/runtime/issues/29855
It seems .NET Core 2.1 tries to use OpenSSL 1.0 while the distro's config at /etc/ssl/openssl.cnf is not compatible with that version. This has been reported for multiple Debian versions.
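To confirm which OpenSSL versions are present inside the container, something like this can be run in it (a diagnostic sketch; library paths vary by distro):

openssl version                 # version of the openssl CLI
ldconfig -p | grep libssl       # which libssl shared libraries are installed
head /etc/ssl/openssl.cnf       # inspect the config that .NET is tripping over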
Possible solutions:
Comment out the ssl_conf value in /etc/ssl/openssl.cnf
Uninstall OpenSSL 1.0.x
Set CLR_OPENSSL_VERSION_OVERRIDE=1.1 to force OpenSSL 1.1 to be used (.NET Core 2.1 prefers binding to OpenSSL 1.0.x because that's what it shipped with; it only uses 1.1 if explicitly told to, or if 1.0 isn't available)
I tried the last one, and in my case it resolved the problem. I am using Debian Buster.
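For anyone applying the same fix, this is roughly how the variable can be set (a sketch; adapt to your own Dockerfile or docker-compose.yml):

# In the Dockerfile:
ENV CLR_OPENSSL_VERSION_OVERRIDE=1.1

# Or in docker-compose.yml, under the affected service:
#   environment:
#     - CLR_OPENSSL_VERSION_OVERRIDE=1.1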
When I install NodeJs module, there is a bug in the running. request module is the latest version: npm install request stream.js:74 throw er; // Unhandled stream error in pipe. ^ Error: ./~/request/~/tough-cookie/package.json Module parse failed: /Users/DongYao/Develop/proj-1/weicang_c/node_modules/request/node_modules/tough-cookie/package.json Line 2: Unexpected token : You may need an appropriate loader to handle this file type. | { | "author": { | "name": "Jeremy Stashewsky", | "email": "jstashewsky#salesforce.com" # ./~/request/~/tough-cookie/lib/cookie.js 38:14-40./~/request/~/mime-types/~/mime-db/db.json Module parse failed: /Users/DongYao/Develop/proj-1/weicang_c/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json Line 2: Unexpected token : You may need an appropriate loader to handle this file type. | { | "application/1d-interleaved-parityfec": { | "source": "iana" | }, ================== and more......... Please help me.