I've created automated tests with Nightwatch-Cucumber, which is based on Nightwatch.js. I can start the tests on my local machine: a Selenium server starts locally and the tests are executed against it.
But now I want to integrate the existing tests into a Kubernetes environment. On my local machine I want to use Minikube, Helm, a Jenkins chart to start the tests, and a Selenium chart. This setup is quite different from the local one: I want to start the tests on the Jenkins instance, and they should run against the Selenium server provided by the Selenium chart. In other words, I want to use a "remote" Selenium server that is already running somewhere in the Kubernetes environment, not a local Selenium server that is started at runtime.
But how do I configure my nightwatch.conf.js to realize that scenario?
My current configuration looks like this:
const config = {
  output_folder: "reports",
  custom_commands_path: "commands",
  // custom_assertions_path: 'assertions',
  live_output: false,
  page_objects_path: "pageobjects",
  disable_colors: false,
  selenium: {
    start_process: true,
    server_path: seleniumServer.path,
    log_path: "",
    host: "127.0.0.1",
    port: 4444
  },
  test_settings: {
    default: {
      globals: {
        waitForConditionTimeout: 30000,
        waitForConditionPollInterval: 500
      },
      screenshots: {
        enabled: true,
        on_failure: true,
        path: "screenshots"
      },
      //launch_url: "http://localhost:8087",
      //selenium_port: 4444,
      //selenium_host: "127.0.0.1",
      desiredCapabilities: {
        browserName: "phantomjs",
        javascriptEnabled: true,
        acceptSslCerts: true,
        "phantomjs.binary.path": phantomjs.path
      }
    }
  }
};

module.exports = config;
First, make sure your remote Selenium server is accessible (check its host IP and port).
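A quick way to verify this, assuming a Selenium 2/3 standalone server like the one used here, is to request its status endpoint, for example with curl: http://<selenium-host>:4444/wd/hub/status should return a small JSON status document.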
Second, configure the following:
const config = {
  output_folder: "reports",
  custom_commands_path: "commands",
  // custom_assertions_path: 'assertions',
  live_output: false,
  page_objects_path: "pageobjects",
  disable_colors: false,
  selenium: {
    start_process: false, // turn this off and comment out all the local-server config below
    // server_path: seleniumServer.path,
    // log_path: "",
    // host: "127.0.0.1",
    // port: 4444
  },
  test_settings: {
    default: {
      globals: {
        waitForConditionTimeout: 30000,
        waitForConditionPollInterval: 500
      },
      screenshots: {
        enabled: true,
        on_failure: true,
        path: "screenshots"
      },
      launch_url: "http://localhost:8087",
      selenium_port: 4444, // the port of your remote Selenium server from step 1
      selenium_host: "127.0.0.1", // the address of your remote Selenium server from step 1
      desiredCapabilities: {
        browserName: "phantomjs",
        javascriptEnabled: true,
        acceptSslCerts: true,
        "phantomjs.binary.path": phantomjs.path
      }
    }
  }
};

module.exports = config;
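In the Kubernetes setup described in the question, the Selenium server won't be at 127.0.0.1: the tests running in the Jenkins pod reach it through the Selenium chart's Service. Below is a minimal sketch of how the relevant parts could look, assuming the chart exposes a hub Service named "selenium-hub" on port 4444 and that the endpoint is passed in via environment variables (SELENIUM_HOST and SELENIUM_PORT are illustrative names, not values taken from the chart):

// Sketch: resolve the remote Selenium endpoint from the environment so the
// same nightwatch.conf.js works both locally and inside the cluster.
// "selenium-hub" is an assumed Service name; adjust it to your chart release.
const seleniumHost = process.env.SELENIUM_HOST || "selenium-hub";
const seleniumPort = parseInt(process.env.SELENIUM_PORT || "4444", 10);

const config = {
  selenium: {
    start_process: false // never start a local Selenium server; use the remote one
  },
  test_settings: {
    default: {
      selenium_host: seleniumHost,
      selenium_port: seleniumPort
    }
  }
};

module.exports = config;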
I just created a new Laravel 9 project and I'm using Lando.
I followed the instructions from this guide: https://sinnbeck.dev/posts/getting-vite-and-laravel-to-work-with-lando
Currently the site is working, and I can update PHP, CSS, or JS code.
But there is no live reloading, and I get an error in my console about a missing source map for the CSS at http://localhost:3009/resources/css/app.css.map.
Here is a link to the project: https://github.com/CrunchyArtie/warene-next
Here is my vite.config.js file:
import { defineConfig } from 'vite';
import laravel from 'laravel-vite-plugin';

export default defineConfig({
    plugins: [
        laravel({
            input: ['resources/css/app.css', 'resources/js/app.js'],
            refresh: true,
        }),
    ],
    server: {
        https: false,
        host: true,
        port: 3009,
        hmr: {host: 'localhost', protocol: 'ws'},
    },
});
and my .lando.yml file:
name: warene
recipe: laravel
config:
  webroot: ./public
  php: '8.1'
  xdebug: true
services:
  node:
    type: node:16
    scanner: false
    ports:
      - 3009:3009
    build:
      - npm install
tooling:
  dev:
    service: node
    cmd: npm run dev
  build:
    service: node
    cmd: npm run build
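(For reference: with that tooling block, Lando should expose the two commands as "lando dev" and "lando build". This follows from how Lando tooling works in general, not from anything stated in the post.)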
EDIT:
With this vite.config.js, live reloading works:
import { defineConfig } from 'vite';
import laravel from 'laravel-vite-plugin';

export default defineConfig({
    plugins: [
        laravel({
            input: ['resources/css/app.css', 'resources/js/app.js'],
            refresh: true,
        }),
    ],
    server: {
        https: false,
        host: true,
        strictPort: true,
        port: 3009,
        hmr: {host: 'localhost', protocol: 'ws'},
        watch: {
            usePolling: true,
        }
    },
});
With css.devSourcemap: true, a source map file is generated and used.
With server.watch.usePolling: true, Vite detects file changes inside the Lando environment (file-system change events often don't propagate into containers, so polling is a common workaround).
This is my final vite.config.js file:
import { defineConfig } from 'vite';
import laravel from 'laravel-vite-plugin';

export default defineConfig({
    plugins: [
        laravel({
            input: ['resources/css/app.css', 'resources/js/app.js'],
            refresh: true,
        }),
    ],
    css: {
        devSourcemap: true,
    },
    server: {
        https: false,
        host: true,
        strictPort: true,
        port: 3009,
        hmr: {host: 'localhost', protocol: 'ws'},
        watch: {
            usePolling: true,
        }
    },
});
Connecting to my DigitalOcean database with Sequelize works fine when I'm not migrating. For example, attempting to create a new table works just fine; the code below successfully connects and creates a new table.
const Sequelize = require('sequelize');

// config is loaded from config/config.js (shown below)
const sequelize = new Sequelize(config.use_env_variable, config);
sequelize.authenticate()
  .then(() => console.log('success')) // wrapped in a callback so it logs only on success
  .catch((error) => console.log(error));
sequelize.define('test-table', {
  test_id: {
    type: Sequelize.INTEGER,
  },
});
sequelize.sync();
I have a CA certificate .crt file I downloaded from DigitalOcean that I'm passing in with the Sequelize options. My config.js looks like this:
development: {
  use_env_variable: 'postgresql://[digitalocean_host_url]?sslmode=require',
  ssl: true,
  dialectOptions: {
    ssl: {
      require: true,
      rejectUnauthorized: false,
      ca: fs.readFileSync(`${__dirname}/../.postgresql/root.crt`),
    },
  },
},
However, when I try to create tables using migrations with
npx sequelize-cli db:migrate
I receive the following output and error:
Parsed url postgresql://[digitalocean_host_url]?sslmode=require
ERROR: no pg_hba.conf entry for host [host], user [user], database [database], SSL off
This is very strange, because SSL works when I create a table using just Sequelize sync. I have a .sequelizerc file for the sequelize-cli configuration, which looks like this:
const path = require('path');
const env = process.env.NODE_ENV || 'development';
const config = require('./config/config')[env];

module.exports = {
  'config': path.resolve('config', 'config.js'),
  'url': config.use_env_variable,
  'options-path': path.resolve('config', 'sql-options.json')
};
Inside my sql-options.json I have the following:
{
  "use_env_variable": "postgresql://[digitalocean_host_url]?sslmode=require",
  "dialect": "postgres",
  "ssl": true,
  "dialectOptions": {
    "ssl": {
      "required": true,
      "rejectUnauthorized": true,
      "ca": "/../.postgresql/root.crt"
    }
  }
}
I've tried a lot of the advice from various resources, including the sequelize/cli repo, but none of it seems to work. Any advice would be helpful.
I had the same issue, and the fix was to add the code below to the migrations config file, even though you already have it in the database connection file.
The following code goes in the config/config.js file used for migrations.
production: {
  username: ****,
  password: ****,
  database: ****,
  host: ****,
  dialect: ****,
  port: ****,
  dialectOptions: {
    ssl: {
      require: true,
      rejectUnauthorized: false,
    },
  },
},
This is what my DB connection, which was working normally, looks like:
const sequelize = new Sequelize({
  host: ****,
  database: ****,
  username: ****,
  password: ****,
  dialect: ****,
  port: ****,
  dialectOptions: {
    ssl: {
      require: true,
      rejectUnauthorized: false,
    },
  },
});
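For reference: when running migrations, make sure sequelize-cli actually reads the intended environment block, e.g. with NODE_ENV=production npx sequelize-cli db:migrate or npx sequelize-cli db:migrate --env production (the --env flag selects the corresponding block in config/config.js).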
I have been working on a NodeJS project locally on my (Windows) machine which connects to a SQL Server database using Sequelize. It works fine on my local machine, but when I upload it to an Azure Web App I get the following error on each request:
Failed to connect to x.x.x.x:1433 - socket hang up (IP address replaced with X's by me)
The code is identical, and I have checked that there are no IP restrictions on the database; it's totally open. I have also checked that the environment variables (username/password etc.) are identical and loaded correctly in production. In fact, my logging shows that the connection attempt is identical.
The only thing that makes it work is removing the Sequelize/tedious option {encrypt: true}.
So, just to be clear: encrypted connections work from localhost but not from an Azure Web App.
I have literally no idea why this could be, so any suggestions would be helpful, even if they just point me in the right direction.
Here is my Sequelize setup. Obviously I've redacted the host IP address and login details, but they are the same for dev and production at the moment.
{
  'production': {
    'username': xxx,
    'password': xxx,
    'database': xxx,
    'host': xxx,
    'dialect': 'mssql',
    'dialectOptions': {
      'options': {
        'encrypt': true,
        'multipleStatements': true,
        'validateBulkLoadParameters': false
      }
    },
    'omitNull': true,
    'pool': {
      'max': 100,
      'min': 0,
      'acquire': 30000,
      'idle': 10000
    },
    'logging': false
  },
  'development': {
    'username': xxx,
    'password': xxx,
    'database': xxx,
    'host': xxx,
    'dialect': 'mssql',
    'dialectOptions': {
      'options': {
        'encrypt': true,
        'multipleStatements': true,
        'validateBulkLoadParameters': false,
        'debug': {
          'packet': true,
          'data': true,
          'payload': true,
          'token': true
        }
      }
    },
    'omitNull': true,
    'pool': {
      'max': 100,
      'min': 0,
      'acquire': 30000,
      'idle': 10000
    },
    'logging': false
  }
}
When I run truffle migrate, all the .json files in my build/contracts directory are missing their network field. I'm trying to deploy to testrpc.
This is my truffle.js file:
module.exports = {
  networks: {
    development: {
      host: "localhost",
      port: 8545,
      network_id: "*", // Match any network id
      gas: 4500000
    }
  },
  solc: {
    optimizer: {
      enabled: true,
      runs: 200
    }
  },
  mocha: {
    bail: true
  }
};
Does anyone know what might be causing this?
Thanks!
Our development group is starting a new React project, and I have been trying to use Nightwatch + Selenium for e2e testing. I got it to work when running everything with NodeJS 6.9.4. Now we have been forced to upgrade NodeJS to 8.1.4, and I'm facing an issue that stops me from proceeding with testing. When using Selenium with Chrome as the browser, I keep getting a 'This site can't be reached' message (but the page can be accessed if I manually open a Chrome window). Any idea what could be going on? Here you have the test result log and my nightwatch.conf.js:
Test Result:
INFO Request: GET /wd/hub/session/fc36e7a7-4909-4dfd-a853-6d769accb085/element/0/text
- data:
- headers: {"Accept":"application/json"}
INFO Response 200 GET /wd/hub/session/fc36e7a7-4909-4dfd-a853-6d769accb085/element/0/text (16ms) { state: 'success',
sessionId: 'fc36e7a7-4909-4dfd-a853-6d769accb085',
hCode: 972983271,
value: 'This site can’t be reached',
class: 'org.openqa.selenium.remote.Response',
status: 0 }
Nightwatch Conf
const SCREENSHOT_PATH = "./screenshots/";
const BIN_PATH = './node_modules/nightwatch/bin/';
// we use a nightwatch.conf.js file so we can include comments and helper functions
module.exports = {
  "src_folders": [
    "__tests__/e2e/specs" // Where you are storing your Nightwatch e2e tests
  ],
  "output_folder": "./reports", // reports (test outcome) output by nightwatch
  "selenium": { // downloaded by selenium-download module (see readme)
    "start_process": false, // tells nightwatch to start/stop the selenium process
    "server_path": "./node_modules/nightwatch/bin/selenium.jar",
    "host": "127.0.0.1",
    "port": 4444, // standard selenium port
    "cli_args": { // chromedriver is downloaded by selenium-download (see readme)
      "webdriver.chrome.driver": "./node_modules/nightwatch/bin/chromedriver"
    }
  },
  "test_settings": {
    "default": {
      "screenshots": {
        "enabled": true, // if you want to keep screenshots
        "path": './screenshots' // save screenshots here
      },
      "globals": {
        "waitForConditionTimeout": 5000 // sometimes internet is slow so wait.
      },
      "desiredCapabilities": { // use Chrome as the default browser for tests
        "browserName": "chrome",
        "javascriptEnabled": true, // turn off to test progressive enhancement
        "chromeOptions": {
          "args": ['--disable-web-security', 'no-sandbox', '--disable-async-dns']
        }
      }
    },
    "chrome": {
      "desiredCapabilities": {
        "browserName": "chrome",
        "javascriptEnabled": true, // turn off to test progressive enhancement
        "chromeOptions": {
          "args": ['--disable-web-security', 'no-sandbox', '--disable-async-dns']
        }
      }
    }
  },
  "params": {
    "baseUrl": "http://localhost:8080/"
  }
};
Sorry for attaching the files instead of expanding them in the comment, but though I have been using Stack Overflow for a long time, this is my first question.
Apparently my localhost was not visible to the Selenium-driven Chrome instance. I needed to make my localhost accessible from outside my machine in order to get this running.
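One way to do that (a sketch based on my reading of the answer; the variable name TEST_BASE_URL and the example address are assumptions, not part of the original) is to serve the app on an externally reachable address and make baseUrl configurable so the tests target that address instead of localhost:

// Sketch: let an environment variable override baseUrl so the tests can
// point at an address the Selenium-driven Chrome can actually reach.
// TEST_BASE_URL and the fallback IP are illustrative assumptions.
module.exports = {
  // ...rest of the nightwatch config from above...
  "params": {
    "baseUrl": process.env.TEST_BASE_URL || "http://192.168.0.10:8080/"
  }
};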