I am using an AWS Lambda function with the code below
'use strict';
constchildProcess= require("child_process");
constpath= require("path");
const backupDatabase = () => {
const scriptFilePath =path.resolve(__dirname, "./backup.sh");
return newPromise((resolve, reject) => {
childProcess.execFile(scriptFilePath, (error) => {
if (error) {
console.error(error);
resolve(false);
}
resolve(true);
});
});
};
module.exports.handler = async (event) => {
const isBackupSuccessful = await backupDatabase();
if (isBackupSuccessful) {
return {
status: "success",
message: "Database backup completed successfully!"
};
}
return {
status: "failed",
message: "Failed to backup the database! Check out the logs for more details"
};
};
The code above runs inside the docker container and tries to run the backup script below
#!/bin/bash
#
# Author: Bruno Coimbra <bbcoimbra#gmail.com>
#
# Backups database located in DB_HOST, DB_PORT, DB_NAME
# and can be accessed using DB_USER. Password should be
# located in $HOME/.pgpass and this file should be
# chmod 0600[1].
#
# Target bucket should be set in BACKUP_BUCKET variable.
#
# AWS credentials should be available as needed by aws-cli[2].
#
# Dependencies:
#
# * pg_dump executable (can be found in postgresql-client-<version> package)
# * aws-cli (with python environment configured execute 'pip install awscli')
#
#
# References
# [1] - http://www.postgresql.org/docs/9.3/static/libpq-pgpass.html
# [2] - http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html
#
#
###############
### Variables
export AWS_ACCESS_KEY_ID=
export AWS_SECRET_ACCESS_KEY=
DB_HOST=
DB_PORT="5432"
DB_USER="postgres"
BACKUP_BUCKET=
###############
#
# **RISK ZONE** DON'T TOUCH below this line unless you know
# exactly what you are doing.
#
###############
set -e
export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
### Variables
S3_BACKUP_BUCKET=${BACKUP_BUCKET:-test-db-backup-bucket}
TEMPFILE_PREFIX="db-$DB_NAME-backup"
TEMPFILE="$(mktemp -t $TEMPFILE_PREFIX-XXXXXXXX)"
DATE="$(date +%Y-%m-%d)"
TIMESTAMP="$(date +%s)"
BACKUPFILE="backup-$DB_NAME-$TIMESTAMP.sql.gz"
LOGTAG="DB $DB_NAME Backup"
### Validations
if [[ ! -r "$HOME/.pgpass" ]]; then
logger -t "$LOGTAG" "$0: Can't find database credentials. $HOME/.pgpass file isn't readable. Aborted."
exit 1
fi
if ! which pg_dump > /dev/null; then
logger -t "$LOGTAG" "$0: Can't find 'pg_dump' executable. Aborted."
exit 1
fi
if ! which aws > /dev/null; then
logger -t "$LOGTAG" "$0: Can't find 'aws cli' executable. Aborted."
exit 1
fi
logger -t "$LOGTAG" "$0: remove any previous dirty backup file"
rm -f /tmp/$TEMPFILE_PREFIX*
### Generate dump and compress it
logger -t "$LOGTAG" "Dumping Database..."
pg_dump -O -x -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -w "$DB_NAME" > "$TEMPFILE"
logger -t "$LOGTAG" "Dumped."
logger -t "$LOGTAG" "Compressing file..."
nice gzip -9 "$TEMPFILE"
logger -t "$LOGTAG" "Compressed."
mv "$TEMPFILE.gz" "$BACKUPFILE"
### Upload it to S3 Bucket and cleanup
logger -t "$LOGTAG" "Uploading '$BACKUPFILE' to S3..."
aws s3 cp "$BACKUPFILE" "s3://$S3_BACKUP_BUCKET/$DATE/$BACKUPFILE"
logger -t "$LOGTAG" "Uploaded."
logger -t "$LOGTAG" "Clean-up..."
rm -f $TEMPFILE
rm -f $BACKUPFILE
rm -f /tmp/$TEMPFILE_PREFIX*
logger -t "$LOGTAG" "Finished."
if [ $? -eq 0 ]; then
echo "script passed"
exit 0
else
echo "script failed"
exit 1
fi
I created a docker image with the above app.js and backup.sh using the Dockerfile below
ARG FUNCTION_DIR="/function"
FROM node:14-buster
RUN apt-get update && \
apt install -y \
g++ \
make \
cmake \
autoconf \
libtool \
wget \
openssh-client \
gnupg2
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt/ buster-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list && \
apt-get update && apt-get -y install postgresql-client-12
ARG FUNCTION_DIR
RUN mkdir -p ${FUNCTION_DIR} && chmod -R 755 ${FUNCTION_DIR}
WORKDIR ${FUNCTION_DIR}
COPY package.json .
RUN npm install
COPY backup.sh .
RUN chmod +x backup.sh
COPY app.js .
ENTRYPOINT ["/usr/local/bin/npx", "aws-lambda-ric"]
CMD ["app.handler"]
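For the ENTRYPOINT above to find aws-lambda-ric locally (rather than npx trying to fetch it at invocation time), the package.json that gets copied in is expected to list it as a dependency. A minimal, hypothetical example (the name and version range are placeholders):
{
  "name": "backup-db",
  "version": "1.0.0",
  "dependencies": {
    "aws-lambda-ric": "^2.0.0"
  }
}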
I run a container from the image built with the above Dockerfile
docker run -v ~/aws:/aws -it --rm -p 9000:8080 --entrypoint /aws/aws-lambda-rie backup-db:v1 /usr/local/bin/npx aws-lambda-ric app.handler
Then I try to hit that container with the curl command below
curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}'
When I run the curl command I see the error below
29 Nov 2021 10:57:30,838 [INFO] (rapid) extensionsDisabledByLayer(/opt/disable-extensions-jwigqn8j) -> stat /opt/disable-extensions-jwigqn8j: no such file or directory
29 Nov 2021 10:57:30,838 [WARNING] (rapid) Cannot list external agents error=open /opt/extensions: no such file or directory
START RequestId: 053246ef-4687-438d-aade-a6794b917b79 Version: $LATEST
2021-11-29T10:57:30.912Z undefined INFO Executing 'app.handler' in function directory '/function'
2021-11-29T10:57:30.919Z undefined ERROR constchildProcess is not defined
29 Nov 2021 10:57:30,926 [WARNING] (rapid) First fatal error stored in appctx: Runtime.ExitError
29 Nov 2021 10:57:30,927 [WARNING] (rapid) Process 53(npx) exited: Runtime exited with error: exit status 1
29 Nov 2021 10:57:30,927 [ERROR] (rapid) Init failed error=Runtime exited with error: exit status 1 InvokeID=
29 Nov 2021 10:57:30,927 [WARNING] (rapid) Reset initiated: ReserveFail
29 Nov 2021 10:57:30,927 [WARNING] (rapid) Cannot list external agents error=open /opt/extensions: no such file or directory
Could someone help me fix the error? My expected output is the message returned by the handler, but instead I am seeing the errors above.
Thank you
Because they both do not exist. There is a typo in your first two lines:
constchildProcess= require("child_process");
constpath= require("path");
Should be:
const childProcess = require("child_process");
const path = require("path");
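With the two requires corrected, a quick way to catch this kind of ReferenceError before rebuilding the image is to load the handler directly with Node. A minimal sketch (it assumes you run it from the directory that contains app.js, and that backup.sh sits next to it):
// smoke-test.js (hypothetical file name)
const { handler } = require("./app.js");

// Invoke the handler with an empty event, just like the curl call does.
handler({})
  .then((result) => console.log(result))
  .catch((err) => console.error(err));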
Related
I was trying to load the Hyperledger Grid smart contracts onto a Sawtooth network.
After successfully running the docker-compose file with 4 active validators, I am getting the following error.
I have only logged one contract's details; the other 5 are giving me the same error.
I have also created an issue on the Sawtooth git repo: click HERE
➜ ~ docker logs tnt-contract-builder
Response Body:
Link { link: "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=958c6a9e7577f16c812a66fbdf0d860b74c36237808cfaabc7910548fb5c8c81451d6ad41a59a50d14af9a81b7569511443dd4dab6067a262a43a3517f52b270" }
Response Body:
StatusResponse {"data":[{"id": "958c6a9e7577f16c812a66fbdf0d860b74c36237808cfaabc7910548fb5c8c81451d6ad41a59a50d14af9a81b7569511443dd4dab6067a262a43a3517f52b270", "status": "PENDING", "invalid_transactions": []}], "link": "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=958c6a9e7577f16c812a66fbdf0d860b74c36237808cfaabc7910548fb5c8c81451d6ad41a59a50d14af9a81b7569511443dd4dab6067a262a43a3517f52b270&wait=30"}
Response Body:
Link { link: "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=dc410ca4b86c35a7ce37f5b82a0c3c7a8209b980ad4638e6120b47703ba0fe9f6b02f717b68c89fb640b6cfda0d3f2e81f80f8750163d417e61141c09fa5bd53" }
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: Os { code: 11, kind: WouldBlock, message: "Resource temporarily unavailable" }', src/libcore/result.rs:1188:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
Response Body:
Link { link: "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=eb421c54bb740a278fbd65ef37879a2fa193b897828cac1f250c8b8060899ce516b19c13fa63b1f081222402f2563dba7d707aace5c33594f587e7c8d5a051d9" }
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: Os { code: 11, kind: WouldBlock, message: "Resource temporarily unavailable" }', src/libcore/result.rs:1188:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
Response Body:
Link { link: "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=a6d84ff7056902908026264bc224c046290f99337770d2b6913a5f76fe8ed5814e5277dc5244b9564d4d12823dc56a2ae1f7c4986f6ac7344a19a20fc382e639" }
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: Os { code: 11, kind: WouldBlock, message: "Resource temporarily unavailable" }', src/libcore/result.rs:1188:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
Response Body:
Link { link: "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=a33bd2e40a34aac8399a8c39bfe073129a8abcaf23575247a521106af8eba43104aff6743efe26300a8ff3854dac20ec74492722d1f321aa02cf320131e83ef2" }
thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: Os { code: 11, kind: WouldBlock, message: "Resource temporarily unavailable" }', src/libcore/result.rs:1188:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
Response Body:
Link { link: "http://sawtooth-rest-api-default-0:8008/batch_statuses?id=9e6140c04c1bd0cf39e281754bf4f8508fc07ab40887250f75e2dd13f9eab3cd4bf7ccacb8d3de88e0f52258627adc709dceaa501d80a18268b9fac8c869fcfb" }
This is my docker-compose file for creating the Sawtooth network and loading the Grid smart contracts:
version: "3.6"
volumes:
contracts-shared:
grid-shared:
pbft-shared:
gridd-alpha:
templates-shared:
cache-shared:
services:
# ---== shared services ==---
sabre-cli:
image: hyperledger/sawtooth-sabre-cli:latest
volumes:
- contracts-shared:/usr/share/scar
- pbft-shared:/pbft-shared
container_name: sabre-cli
stop_signal: SIGKILL
tnt-contract-builder:
image: piashtanjin/tnt-contract-builder:latest
container_name: tnt-contract-builder
volumes:
- pbft-shared:/pbft-shared
entrypoint: |
bash -c "
while true; do curl -s http://sawtooth-rest-api-default-0:8008/state | grep -q head; if [ $$? -eq 0 ]; then break; fi; sleep 0.5; done;
sabre cr --create grid_track_and_trace --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre upload --filename /tmp/track_and_trace.yaml --key /pbft-shared/validators/validator-0 --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create a43b46 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm a43b46 grid_track_and_trace --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee01 grid_track_and_trace --key /pbft-shared/validators/validator-0 --read --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee05 grid_track_and_trace --key /pbft-shared/validators/validator-0 --read --url http://sawtooth-rest-api-default-0:8008 --wait 30
echo '---------========= grid schema contract is loaded =========---------'
"
schema-contract-builder:
image: piashtanjin/schema-contract-builder:latest
container_name: schema-contract-builder
volumes:
- pbft-shared:/pbft-shared
entrypoint: |
bash -c "
while true; do curl -s http://sawtooth-rest-api-default-0:8008/state | grep -q head; if [ $$? -eq 0 ]; then break; fi; sleep 0.5; done;
sabre cr --create grid_schema --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre upload --filename /tmp/schema.yaml --key /pbft-shared/validators/validator-0 --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee01 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee01 grid_schema --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee05 grid_schema --key /pbft-shared/validators/validator-0 --read --url http://sawtooth-rest-api-default-0:8008 --wait 30
echo '---------========= grid schema contract is loaded =========---------'
"
# pike-contract-builder:
pike-contract-builder:
image: piashtanjin/pike-contract-builder:latest
container_name: pike-contract-builder
volumes:
- pbft-shared:/pbft-shared
entrypoint: |
bash -c "
while true; do curl -s http://sawtooth-rest-api-default-0:8008/state | grep -q head; if [ $$? -eq 0 ]; then break; fi; sleep 0.5; done;
sabre cr --create grid_pike --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre upload --filename /tmp/pike.yaml --key /pbft-shared/validators/validator-0 --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee05 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee05 grid_pike --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
echo '---------========= pike contract is loaded =========---------'
"
product-contract-builder:
image: piashtanjin/product-contract-builder:latest
container_name: product-contract-builder
volumes:
- pbft-shared:/pbft-shared
entrypoint: |
bash -c "
while true; do curl -s http://sawtooth-rest-api-default-0:8008/state | grep -q head; if [ $$? -eq 0 ]; then break; fi; sleep 0.5; done;
sabre cr --create grid_product --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre upload --filename /tmp/product.yaml --key /pbft-shared/validators/validator-0 --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee05 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee01 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee02 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee05 grid_product --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee01 grid_product --key /pbft-shared/validators/validator-0 --read --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee02 grid_product --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
echo '---------========= grid_product contract is loaded =========---------'
"
location-contract-builder:
image: piashtanjin/location-contract-builder:latest
container_name: location-contract-builder
volumes:
- pbft-shared:/pbft-shared
entrypoint: |
bash -c "
while true; do curl -s http://sawtooth-rest-api-default-0:8008/state | grep -q head; if [ $$? -eq 0 ]; then break; fi; sleep 0.5; done;
sabre cr --create grid_location --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre upload --filename /tmp/location.yaml --key /pbft-shared/validators/validator-0 --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee04 --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee05 grid_location --key /pbft-shared/validators/validator-0 --read --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee01 grid_location --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee04 grid_location --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
echo '---------========= grid_location contract is loaded =========---------'
"
purchase-order-contract-builder:
image: piashtanjin/purchase-order-contract-builder:latest
container_name: purchase-order-contract-builder
volumes:
- pbft-shared:/pbft-shared
entrypoint: |
bash -c "
while true; do curl -s http://sawtooth-rest-api-default-0:8008/state | grep -q head; if [ $$? -eq 0 ]; then break; fi; sleep 0.5; done;
sabre cr --create grid_purchase_order --key /pbft-shared/validators/validator-0 --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre upload --filename /tmp/purchase_order.yaml --key /pbft-shared/validators/validator-0 --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre ns --create 621dee06 --key /pbft-shared/validators/validator --owner $$(cat /pbft-shared/validators/validator-0.pub) --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee05 grid_purchase_order --key /pbft-shared/validators/validator-0 --read --url http://sawtooth-rest-api-default-0:8008 --wait 30
sabre perm 621dee06 grid_purchase_order --key /pbft-shared/validators/validator-0 --read --write --url http://sawtooth-rest-api-default-0:8008 --wait 30
echo '---------========= grid_purchase_order contract is loaded =========---------'
"
# if [ ! -e sabre-admin.batch ]; then
# sawset proposal create \
# -k /root/.sawtooth/keys/my_key.priv \
# sawtooth.swa.administrators=$$(cat /pbft-shared/validators/validator-0.pub) \
# -o sabre-admin.batch
# sawadm genesis sabre-admin.batch
validator-0:
image: hyperledger/sawtooth-validator:nightly
container_name: sawtooth-validator-default-0
expose:
- 4004
- 5050
- 8800
ports:
- "4004:4004"
volumes:
- pbft-shared:/pbft-shared
command: |
bash -c "
if [ -e /pbft-shared/validators/validator-0.priv ]; then
cp /pbft-shared/validators/validator-0.pub /etc/sawtooth/keys/validator.pub
cp /pbft-shared/validators/validator-0.priv /etc/sawtooth/keys/validator.priv
fi &&
if [ ! -e /etc/sawtooth/keys/validator.priv ]; then
sawadm keygen
mkdir -p /pbft-shared/validators || true
cp /etc/sawtooth/keys/validator.pub /pbft-shared/validators/validator-0.pub
cp /etc/sawtooth/keys/validator.priv /pbft-shared/validators/validator-0.priv
fi &&
if [ ! -e config-genesis.batch ]; then
sawset genesis -k /etc/sawtooth/keys/validator.priv -o config-genesis.batch
fi &&
while [[ ! -f /pbft-shared/validators/validator-1.pub || \
! -f /pbft-shared/validators/validator-2.pub || \
! -f /pbft-shared/validators/validator-3.pub || \
! -f /pbft-shared/validators/validator-4.pub ]];
do sleep 1; done
echo sawtooth.consensus.pbft.members=\\['\"'$$(cat /pbft-shared/validators/validator-0.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-1.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-2.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-3.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-4.pub)'\"'\\] &&
if [ ! -e /root/.sawtooth/keys/my_key.priv ]; then
sawtooth keygen my_key
fi &&
if [ ! -e config.batch ]; then
sawset proposal create \
-k /etc/sawtooth/keys/validator.priv \
sawtooth.consensus.algorithm.name=pbft \
sawtooth.consensus.algorithm.version=1.0 \
sawtooth.validator.transaction_families='[{"family": "sabre", "version": "0.5"}, {"family":"sawtoo", "version":"1.0"}, {"family":"xo", "version":"1.0"}]' \
sawtooth.identity.allowed_keys=\\['\"'$$(cat /pbft-shared/validators/validator-0.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-1.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-2.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-3.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-4.pub)'\"'\\] \
sawtooth.swa.administrators=\\['\"'$$(cat /pbft-shared/validators/validator-0.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-1.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-2.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-3.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-4.pub)'\"'\\] \
sawtooth.consensus.pbft.members=\\['\"'$$(cat /pbft-shared/validators/validator-0.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-1.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-2.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-3.pub)'\"','\"'$$(cat /pbft-shared/validators/validator-4.pub)'\"'\\] \
sawtooth.publisher.max_batches_per_block=1200 \
-o config.batch
fi &&
if [ ! -e /var/lib/sawtooth/genesis.batch ]; then
sawadm genesis config-genesis.batch config.batch
fi &&
sawtooth-validator -vv \
--endpoint tcp://validator-0:8800 \
--bind component:tcp://eth0:4004 \
--bind consensus:tcp://eth0:5050 \
--bind network:tcp://eth0:8800 \
--scheduler parallel \
--peering static \
--maximum-peer-connectivity 10000
"
validator-1:
image: hyperledger/sawtooth-validator:nightly
container_name: sawtooth-validator-default-1
expose:
- 4004
- 5050
- 8800
volumes:
- pbft-shared:/pbft-shared
command: |
bash -c "
if [ -e /pbft-shared/validators/validator-1.priv ]; then
cp /pbft-shared/validators/validator-1.pub /etc/sawtooth/keys/validator.pub
cp /pbft-shared/validators/validator-1.priv /etc/sawtooth/keys/validator.priv
fi &&
if [ ! -e /etc/sawtooth/keys/validator.priv ]; then
sawadm keygen
mkdir -p /pbft-shared/validators || true
cp /etc/sawtooth/keys/validator.pub /pbft-shared/validators/validator-1.pub
cp /etc/sawtooth/keys/validator.priv /pbft-shared/validators/validator-1.priv
fi &&
sawtooth keygen my_key &&
sawtooth-validator -vv \
--endpoint tcp://validator-1:8800 \
--bind component:tcp://eth0:4004 \
--bind consensus:tcp://eth0:5050 \
--bind network:tcp://eth0:8800 \
--scheduler parallel \
--peering static \
--maximum-peer-connectivity 10000 \
--peers tcp://validator-0:8800
"
validator-2:
image: hyperledger/sawtooth-validator:nightly
container_name: sawtooth-validator-default-2
expose:
- 4004
- 5050
- 8800
volumes:
- pbft-shared:/pbft-shared
command: |
bash -c "
if [ -e /pbft-shared/validators/validator-2.priv ]; then
cp /pbft-shared/validators/validator-2.pub /etc/sawtooth/keys/validator.pub
cp /pbft-shared/validators/validator-2.priv /etc/sawtooth/keys/validator.priv
fi &&
if [ ! -e /etc/sawtooth/keys/validator.priv ]; then
sawadm keygen
mkdir -p /pbft-shared/validators || true
cp /etc/sawtooth/keys/validator.pub /pbft-shared/validators/validator-2.pub
cp /etc/sawtooth/keys/validator.priv /pbft-shared/validators/validator-2.priv
fi &&
sawtooth keygen my_key &&
sawtooth-validator -vv \
--endpoint tcp://validator-2:8800 \
--bind component:tcp://eth0:4004 \
--bind consensus:tcp://eth0:5050 \
--bind network:tcp://eth0:8800 \
--scheduler parallel \
--peering static \
--maximum-peer-connectivity 10000 \
--peers tcp://validator-0:8800 \
--peers tcp://validator-1:8800
"
validator-3:
image: hyperledger/sawtooth-validator:nightly
container_name: sawtooth-validator-default-3
expose:
- 4004
- 5050
- 8800
volumes:
- pbft-shared:/pbft-shared
command: |
bash -c "
if [ -e /pbft-shared/validators/validator-3.priv ]; then
cp /pbft-shared/validators/validator-3.pub /etc/sawtooth/keys/validator.pub
cp /pbft-shared/validators/validator-3.priv /etc/sawtooth/keys/validator.priv
fi &&
if [ ! -e /etc/sawtooth/keys/validator.priv ]; then
sawadm keygen
mkdir -p /pbft-shared/validators || true
cp /etc/sawtooth/keys/validator.pub /pbft-shared/validators/validator-3.pub
cp /etc/sawtooth/keys/validator.priv /pbft-shared/validators/validator-3.priv
fi &&
sawtooth keygen my_key &&
sawtooth-validator -vv \
--endpoint tcp://validator-3:8800 \
--bind component:tcp://eth0:4004 \
--bind consensus:tcp://eth0:5050 \
--bind network:tcp://eth0:8800 \
--scheduler parallel \
--peering static \
--maximum-peer-connectivity 10000 \
--peers tcp://validator-0:8800 \
--peers tcp://validator-1:8800 \
--peers tcp://validator-2:8800
"
validator-4:
image: hyperledger/sawtooth-validator:nightly
container_name: sawtooth-validator-default-4
expose:
- 4004
- 5050
- 8800
volumes:
- pbft-shared:/pbft-shared
command: |
bash -c "
if [ -e /pbft-shared/validators/validator-4.priv ]; then
cp /pbft-shared/validators/validator-4.pub /etc/sawtooth/keys/validator.pub
cp /pbft-shared/validators/validator-4.priv /etc/sawtooth/keys/validator.priv
fi &&
if [ ! -e /etc/sawtooth/keys/validator.priv ]; then
sawadm keygen
mkdir -p /pbft-shared/validators || true
cp /etc/sawtooth/keys/validator.pub /pbft-shared/validators/validator-4.pub
cp /etc/sawtooth/keys/validator.priv /pbft-shared/validators/validator-4.priv
fi &&
sawtooth keygen my_key &&
sawtooth-validator -vv \
--endpoint tcp://validator-4:8800 \
--bind component:tcp://eth0:4004 \
--bind consensus:tcp://eth0:5050 \
--bind network:tcp://eth0:8800 \
--scheduler parallel \
--peering static \
--maximum-peer-connectivity 10000 \
--peers tcp://validator-0:8800 \
--peers tcp://validator-1:8800 \
--peers tcp://validator-2:8800 \
--peers tcp://validator-3:8800
"
sawtooth-rest-api:
image: hyperledger/sawtooth-rest-api:latest
container_name: sawtooth-rest-api-default-0
expose:
- 8008
ports:
- "8008:8008"
depends_on:
- validator-0
command: |
bash -c "
sawtooth-rest-api \
--connect tcp://validator-0:4004 \
--bind sawtooth-rest-api:8008
"
stop_signal: SIGKILL
sawtooth-rest-api1:
image: hyperledger/sawtooth-rest-api:nightly
container_name: sawtooth-rest-api-1
expose:
- 8008
depends_on:
- validator-0
command: |
bash -c "
sawtooth-rest-api -v --connect tcp://validator-1:4004 --bind sawtooth-rest-api1:8008
"
stop_signal: SIGKILL
sawtooth-rest-api2:
image: hyperledger/sawtooth-rest-api:nightly
container_name: sawtooth-rest-api-2
expose:
- 8008
depends_on:
- validator-0
command: |
bash -c "
sawtooth-rest-api -v --connect tcp://validator-2:4004 --bind sawtooth-rest-api2:8008
"
stop_signal: SIGKILL
sawtooth-rest-api3:
image: hyperledger/sawtooth-rest-api:nightly
container_name: sawtooth-rest-api-3
expose:
- 8008
depends_on:
- validator-0
command: |
bash -c "
sawtooth-rest-api -v --connect tcp://validator-3:4004 --bind sawtooth-rest-api3:8008
"
stop_signal: SIGKILL
sawtooth-rest-api4:
image: hyperledger/sawtooth-rest-api:nightly
container_name: sawtooth-rest-api-4
expose:
- 8008
depends_on:
- validator-0
command: |
bash -c "
sawtooth-rest-api -v --connect tcp://validator-4:4004 --bind sawtooth-rest-api4:8008
"
stop_signal: SIGKILL
sawtooth-settings-tp:
image: hyperledger/sawtooth-settings-tp:latest
container_name: sawtooth-settings-tp
expose:
- 4004
command: settings-tp -v -C tcp://validator-0:4004
stop_signal: SIGKILL
sawtooth-settings-tp-1:
image: hyperledger/sawtooth-settings-tp:latest
container_name: sawtooth-settings-tp-1
expose:
- 4004
command: settings-tp -v -C tcp://validator-1:4004
stop_signal: SIGKILL
sawtooth-settings-tp-2:
image: hyperledger/sawtooth-settings-tp:latest
container_name: sawtooth-settings-tp-2
expose:
- 4004
command: settings-tp -v -C tcp://validator-2:4004
stop_signal: SIGKILL
sawtooth-settings-tp-3:
image: hyperledger/sawtooth-settings-tp:latest
container_name: sawtooth-settings-tp-3
expose:
- 4004
command: settings-tp -v -C tcp://validator-3:4004
stop_signal: SIGKILL
sawtooth-settings-tp-4:
image: hyperledger/sawtooth-settings-tp:latest
container_name: sawtooth-settings-tp-4
expose:
- 4004
command: settings-tp -v -C tcp://validator-4:4004
stop_signal: SIGKILL
sabre-tp:
image: hyperledger/sawtooth-sabre-tp:0.8
container_name: sawtooth-sabre-tp
depends_on:
- validator-0
entrypoint: sawtooth-sabre -vv --connect tcp://validator-0:4004
sawtooth-client:
image: hyperledger/sawtooth-shell:nightly
container_name: sawtooth-shell
volumes:
- pbft-shared:/pbft-shared
depends_on:
- validator-0
command: |
bash -c "
sawtooth keygen &&
tail -f /dev/null
"
stop_signal: SIGKILL
pbft-0:
image: hyperledger/sawtooth-pbft-engine:nightly
container_name: sawtooth-pbft-engine-default-0
command: pbft-engine -vv --connect tcp://validator-0:5050
stop_signal: SIGKILL
pbft-1:
image: hyperledger/sawtooth-pbft-engine:nightly
container_name: sawtooth-pbft-engine-default-1
command: pbft-engine -vv --connect tcp://validator-1:5050
stop_signal: SIGKILL
pbft-2:
image: hyperledger/sawtooth-pbft-engine:nightly
container_name: sawtooth-pbft-engine-default-2
command: pbft-engine -vv --connect tcp://validator-2:5050
stop_signal: SIGKILL
pbft-3:
image: hyperledger/sawtooth-pbft-engine:nightly
container_name: sawtooth-pbft-engine-default-3
command: pbft-engine -vv --connect tcp://validator-3:5050
stop_signal: SIGKILL
pbft-4:
image: hyperledger/sawtooth-pbft-engine:nightly
container_name: sawtooth-pbft-engine-default-4
command: pbft-engine -vv --connect tcp://validator-4:5050
stop_signal: SIGKILL
# ---== alpha node ==---
db-alpha:
image: postgres
container_name: db-alpha
hostname: db-alpha
restart: always
expose:
- 5432
environment:
POSTGRES_USER: grid
POSTGRES_PASSWORD: grid_example
POSTGRES_DB: grid
gridd-alpha:
image: gridd
container_name: gridd-alpha
hostname: gridd-alpha
build:
context: ../..
dockerfile: daemon/Dockerfile
args:
- REPO_VERSION=${REPO_VERSION}
- CARGO_ARGS= --features experimental
volumes:
- contracts-shared:/usr/share/scar
- pbft-shared:/pbft-shared
- gridd-alpha:/etc/grid/keys
- cache-shared:/var/cache/grid
expose:
- 8080
ports:
- "8080:8080"
environment:
GRID_DAEMON_KEY: "alpha-agent"
GRID_DAEMON_ENDPOINT: "http://gridd-alpha:8080"
entrypoint: |
bash -c "
# we need to wait for the db to have started.
until PGPASSWORD=grid_example psql -h db-alpha -U grid -c '\q' > /dev/null 2>&1; do
>&2 echo \"Database is unavailable - sleeping\"
sleep 1
done
grid keygen --skip && \
grid keygen --system --skip && \
grid -vv database migrate \
-C postgres://grid:grid_example#db-alpha/grid &&
gridd -vv -b 0.0.0.0:8080 -k root -C tcp://validator-0:4004 \
--database-url postgres://grid:grid_example#db-alpha/grid
"
I am attempting to bitbake an image for a de0-nano-soc board with the RT patch and I am getting fetch errors.
These are the settings in my local.conf:
# Real time patch needs 4.9 toolchain.
MACHINE ?= "cyclone5"
GCCVERSION = "linaro-4.9"
SDKGCCVERSION = "linaro-4.9"
DEFAULTTUNE = "cortexa9hf-neon"
PREFERRED_PROVIDER_virtual/kernel = "linux-altera-ltsi-rt"
PREFERRED_VERSION_linux-altera-ltsi-rt = "4.14.126%"
UBOOT_CONFIG = "de0-nano-soc"
UBOOT_EXTLINUX_FDT_default = "../socfpga_cyclone5_de0_nano_soc.dtb"
#
# Additional packages/recipes utilized from other layers should be included here.
#
IMAGE_INSTALL_append += "openssh apt dpkg nano nodejs zeromq ninja protobuf"
and my bblayers.conf
BBLAYERS ?= " \
/home/asei/Documents/yocto-kirkstone/poky/meta \
/home/asei/Documents/yocto-kirkstone/poky/meta-poky \
/home/asei/Documents/yocto-kirkstone/poky/meta-yocto-bsp \
/home/asei/Documents/yocto-kirkstone/poky/meta-altera \
/home/asei/Documents/yocto-kirkstone/poky/meta-openembedded/meta-oe \
/home/asei/Documents/yocto-kirkstone/poky/meta-openembedded/meta-python \
/home/asei/Documents/yocto-kirkstone/poky/meta-openembedded/meta-networking \
/home/asei/Documents/yocto-kirkstone/poky/meta-openembedded/meta-webserver \
"
The error log states this:
DEBUG: For url ['git', 'github.com', '/altera-opensource/linux-socfpga.git', '', '', OrderedDict([('protocol', 'https'), ('branch', 'socfpga-4.14.126-ltsi-rt')])] comparing ['git', '.*', '/.*', '', '', OrderedDict()] to ['git', 'HOST', '/PATH', '', '', OrderedDict([('protocol', 'https')])]
DEBUG: Fetching http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz using command '/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz''
DEBUG: Fetcher accessed the network with the command /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz'
DEBUG: Running export PSEUDO_DISABLED=1; export DBUS_SESSION_BUS_ADDRESS="unix:path=/run/user/1000/bus"; export SSH_AGENT_PID="1205"; export SSH_AUTH_SOCK="/run/user/1000/keyring/ssh"; export PATH="/home/asei/Documents/yocto-de0nano/build/tmp/sysroots-uninative/x86_64-linux/usr/bin:/home/asei/Documents/yocto-de0nano/poky/scripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot/usr/bin/crossscripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/bin:/home/asei/Documents/yocto-de0nano/poky/bitbake/bin:/home/asei/Documents/yocto-de0nano/build/tmp/hosttools"; export HOME="/home/asei"; /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz' --progress=dot -v
--2022-11-21 23:17:34-- http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz
Resolving downloads.yoctoproject.org (downloads.yoctoproject.org)... 198.145.29.62
Connecting to downloads.yoctoproject.org (downloads.yoctoproject.org)|198.145.29.62|:80... connected.
HTTP request sent, awaiting response... 404 Not Found
2022-11-21 23:17:34 ERROR 404: Not Found.
DEBUG: Mirror fetch failure for url http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz (original url: git://github.com/altera-opensource/linux-socfpga.git;protocol=https;branch=socfpga-4.14.126-ltsi-rt)
DEBUG: Fetcher failure: Fetch command export PSEUDO_DISABLED=1; export DBUS_SESSION_BUS_ADDRESS="unix:path=/run/user/1000/bus"; export SSH_AGENT_PID="1205"; export SSH_AUTH_SOCK="/run/user/1000/keyring/ssh"; export PATH="/home/asei/Documents/yocto-de0nano/build/tmp/sysroots-uninative/x86_64-linux/usr/bin:/home/asei/Documents/yocto-de0nano/poky/scripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot/usr/bin/crossscripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/bin:/home/asei/Documents/yocto-de0nano/poky/bitbake/bin:/home/asei/Documents/yocto-de0nano/build/tmp/hosttools"; export HOME="/home/asei"; /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://downloads.yoctoproject.org/mirror/sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz' --progress=dot -v failed with exit code 8, no output
DEBUG: Fetching http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz using command '/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz''
DEBUG: Fetcher accessed the network with the command /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz'
DEBUG: Running export PSEUDO_DISABLED=1; export DBUS_SESSION_BUS_ADDRESS="unix:path=/run/user/1000/bus"; export SSH_AGENT_PID="1205"; export SSH_AUTH_SOCK="/run/user/1000/keyring/ssh"; export PATH="/home/asei/Documents/yocto-de0nano/build/tmp/sysroots-uninative/x86_64-linux/usr/bin:/home/asei/Documents/yocto-de0nano/poky/scripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot/usr/bin/crossscripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/bin:/home/asei/Documents/yocto-de0nano/poky/bitbake/bin:/home/asei/Documents/yocto-de0nano/build/tmp/hosttools"; export HOME="/home/asei"; /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz' --progress=dot -v
--2022-11-21 23:17:34-- http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz
Resolving sources.openembedded.org (sources.openembedded.org)... 198.145.29.27
Connecting to sources.openembedded.org (sources.openembedded.org)|198.145.29.27|:80... connected.
HTTP request sent, awaiting response... 404 Not Found
2022-11-21 23:17:34 ERROR 404: Not Found.
DEBUG: Mirror fetch failure for url http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz (original url: git://github.com/altera-opensource/linux-socfpga.git;protocol=https;branch=socfpga-4.14.126-ltsi-rt)
DEBUG: Fetcher failure: Fetch command export PSEUDO_DISABLED=1; export DBUS_SESSION_BUS_ADDRESS="unix:path=/run/user/1000/bus"; export SSH_AGENT_PID="1205"; export SSH_AUTH_SOCK="/run/user/1000/keyring/ssh"; export PATH="/home/asei/Documents/yocto-de0nano/build/tmp/sysroots-uninative/x86_64-linux/usr/bin:/home/asei/Documents/yocto-de0nano/poky/scripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot/usr/bin/crossscripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/bin:/home/asei/Documents/yocto-de0nano/poky/bitbake/bin:/home/asei/Documents/yocto-de0nano/build/tmp/hosttools"; export HOME="/home/asei"; /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'http://sources.openembedded.org/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz' --progress=dot -v failed with exit code 8, no output
DEBUG: Fetching https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz using command '/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz''
DEBUG: Fetcher accessed the network with the command /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz'
DEBUG: Running export PSEUDO_DISABLED=1; export DBUS_SESSION_BUS_ADDRESS="unix:path=/run/user/1000/bus"; export SSH_AGENT_PID="1205"; export SSH_AUTH_SOCK="/run/user/1000/keyring/ssh"; export PATH="/home/asei/Documents/yocto-de0nano/build/tmp/sysroots-uninative/x86_64-linux/usr/bin:/home/asei/Documents/yocto-de0nano/poky/scripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot/usr/bin/crossscripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/bin:/home/asei/Documents/yocto-de0nano/poky/bitbake/bin:/home/asei/Documents/yocto-de0nano/build/tmp/hosttools"; export HOME="/home/asei"; /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz' --progress=dot -v
--2022-11-21 23:17:34-- https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz
Resolving mirrors.kernel.org (mirrors.kernel.org)... 139.178.88.99, 2604:1380:45e3:2400::1
Connecting to mirrors.kernel.org (mirrors.kernel.org)|139.178.88.99|:443... connected.
HTTP request sent, awaiting response... 404 Not Found
2022-11-21 23:17:34 ERROR 404: Not Found.
DEBUG: Mirror fetch failure for url https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz (original url: git://github.com/altera-opensource/linux-socfpga.git;protocol=https;branch=socfpga-4.14.126-ltsi-rt)
DEBUG: Fetcher failure: Fetch command export PSEUDO_DISABLED=1; export DBUS_SESSION_BUS_ADDRESS="unix:path=/run/user/1000/bus"; export SSH_AGENT_PID="1205"; export SSH_AUTH_SOCK="/run/user/1000/keyring/ssh"; export PATH="/home/asei/Documents/yocto-de0nano/build/tmp/sysroots-uninative/x86_64-linux/usr/bin:/home/asei/Documents/yocto-de0nano/poky/scripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot/usr/bin/crossscripts:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/usr/bin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/sbin:/home/asei/Documents/yocto-de0nano/build/tmp/work/cyclone5-poky-linux-gnueabi/linux-altera-ltsi-rt/4.14.126-ltsi-rt+gitAUTOINC+652711a08d-r0/recipe-sysroot-native/bin:/home/asei/Documents/yocto-de0nano/poky/bitbake/bin:/home/asei/Documents/yocto-de0nano/build/tmp/hosttools"; export HOME="/home/asei"; /usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate -O /home/asei/Documents/yocto-de0nano/build/downloads/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz.tmp -P /home/asei/Documents/yocto-de0nano/build/downloads 'https://mirrors.kernel.org/yocto-sources/git2_github.com.altera-opensource.linux-socfpga.git.tar.gz' --progress=dot -v failed with exit code 8, no output
ERROR: Fetcher failure: Unable to find revision 652711a08dbe255d0aa3c2dd7231d6c6bfc61817 in branch socfpga-4.14.126-ltsi-rt even from upstream
ERROR: Bitbake Fetcher Error: FetchError('Unable to fetch URL from any source.', 'git://github.com/altera-opensource/linux-socfpga.git;protocol=https;branch=socfpga-4.14.126-ltsi-rt')
DEBUG: Python function base_do_fetch finished
DEBUG: Python function do_fetch finished
I looked at the linux-socfpga GitHub repository, and there doesn't seem to be a branch for any RT patch?
I am using an AWS Lambda function with the code below
'use strict';
var newPromise = require('es6-promise').Promise;
const childProcess= require("child_process");
const path= require("path");
const backupDatabase = () => {
const scriptFilePath =path.resolve(__dirname, "./backup.sh");
return newPromise((resolve, reject) => {
childProcess.execFile(scriptFilePath, (error) => {
if (error) {
console.error(error);
resolve(false);
}
resolve(true);
});
});
};
module.exports.handler = async (event) => {
const isBackupSuccessful = await backupDatabase();
if (isBackupSuccessful) {
return {
status: "success",
message: "Database backup completed successfully!"
};
}
return {
status: "failed",
message: "Failed to backup the database! Check out the logs for more details"
};
};
The code above runs inside the docker container and tries to run the same backup.sh script already shown in the first question above.
I created a docker image with the above app.js and backup.sh using the same Dockerfile shown earlier.
I run the docker container created from that image
docker run -v ~/aws:/aws -it --rm -p 9000:8080 --entrypoint /aws/aws-lambda-rie backup-db:v1 /usr/local/bin/npx aws-lambda-ric app.handler
Then I try to hit that container with the curl command below
curl -XPOST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{}'
When I run the curl command I see the error below
An error I see is: "newPromise is not defined","trace":["ReferenceError: newPromise is not defined"," at backupDatabase (/function/app.js:9:3)","
I tried adding var newPromise = require('es6-promise').Promise;, but that gave a new error: "Cannot set property 'scqfkjngu7o' of undefined","trace"
Could someone help me fix the error? My expected output is the message returned by the handler, but instead I am seeing the errors above.
Thank you
Node 14 supports promises natively. You should do:
return new Promise((resolve, reject) => {
childProcess.execFile(scriptFilePath, (error) => {
if (error) {
console.error(error);
resolve(false);
}
resolve(true);
});
});
Note the space between new and Promise: Promise is the built-in object and you are calling its constructor, so there is no need to import any module.
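For completeness, the same wrapper can also be written with util.promisify, which is likewise built into Node 14. This is a sketch of an alternative, not the code from the question; like the original, it resolves to true/false instead of rejecting:
const util = require("util");
const childProcess = require("child_process");
const path = require("path");

// The promisified execFile rejects when the script exits with a non-zero status.
const execFile = util.promisify(childProcess.execFile);

const backupDatabase = async () => {
  const scriptFilePath = path.resolve(__dirname, "./backup.sh");
  try {
    await execFile(scriptFilePath);
    return true;
  } catch (error) {
    console.error(error);
    return false;
  }
};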
This is the code I'm trying to execute:
#!/bin/bash
set -e -a
err_report() {
echo "Error on line $1"
}
trap 'err_report $LINENO' ERR
IFS=$'\n'
value=($(cat builds.txt))
prevBuild=${value[0]}
prevBuildDir=$prevBuild
currentBuild=${value[1]}
currentBuildDir=$currentBuild
splitPrevBuild=(${prevBuild//./$'\n'})
splitcurrentBuild=(${currentBuild//./$'\n'})
prevBuildSR=${splitPrevBuild[2]}
prevBuildHF=${splitPrevBuild[3]}
prevBuildNum=${splitPrevBuild[4]}
currentBuildSR=${splitcurrentBuild[2]}
currentBuildHF=${splitcurrentBuild[3]}
currentBuildNum=${splitcurrentBuild[4]}
function change {
cd ~/InstallationFiles/${currentBuildSR}/${currentBuildDir}/${currentBuild}extracted
}
change
pwd
ssh -T atao52#kvs-in-hsglap17.in.kronos.com <<-EOF
$(typeset -f)
change
pwd
echo hi
exit
EOF
How do I access the variables inside the heredoc? Currently it says
cd: /home/atao52/InstallationFiles///extracted: No such file or directory
I am trying to execute a shell script using Node.js, and the Node.js server gives the wrong error message.
shell script
sudo mkdir updateinprogress
servicebranch="Development"
currentpath="$PWD"
tarfilename="$(date +'%d-%m-%Y_%H-%M-%S')"
cd ..
if [ ! -d "backups" ]; then
sudo mkdir backups
fi
cd backups
if [ ! -d "AppInEngine" ]; then
sudo mkdir AppInEngine
fi
cd $currentpath
if [ ! -d "current_file_backup" ]; then
sudo mkdir current_file_backup
fi
sudo cp -r /var/www/AppInEngine $currentpath/current_file_backup/
sudo git clone https://github.com/abc/AppInEngine --branch $servicebranch -q
if [ $? != 0 ]; then
echo -e "{\"status\":\"false\", \"message\":\""error occerd while cloning git repository "\"}"
sudo rm -r AppInEngine updateinprogress current_file_backup
exit
fi
sudo cp -r /var/www/AppInEngine/configs AppInEngine
sudo cp -r /var/www/AppInEngine/Logs AppInEngine/
sudo rm -r /var/www/AppInEngine
sudo cp -r AppInEngine /var/www
sudo chmod -R 777 /var/www/AppInEngine
service apache2 restart > /dev/null
if [ $? != 0 ]; then
sudo rm -r /var/www/AppInEngine
sudo cp -r $currentpath/current_file_backup/AppInEngine /var/www/
sudo service apache2 restart > /dev/null
sudo rm -r AppInEngine updateinprogress current_file_backup
echo -e "{\"status\":\"false\", \"message\":\""error occerd while starting apache2 .restart with backup "\"}"
exit
fi
sleep 160
if curl -s --head --request GET http://localhost/AppInEngine/get_version | grep "200 OK" > /dev/null; then
sudo tar -cf AppInEngine_$tarfilename.tar.gz current_file_backup/AppInEngine
sudo mv AppInEngine_$tarfilename.tar.gz ../backups/AppInEngine/
cd ../backups/AppInEngine
#sudo cp AppInEngine$tarfilename.tar.gz
sudo rm -f $(ls -1t | tail -n +3)
cd $currentpath
sudo rm -r AppInEngine updateinprogress current_file_backup
echo -e "{\"status\":\"true\", \"message\":\""service update success"\"}"
exit
else
echo -e "{\"status\":\"false\", \"message\":\""service update faild .trying to start with backup "\"}"
sudo service apache2 stop
sudo rm -r /var/www/AppInEngine
sudo cp -r $currentpath/current_file_backup/AppInEngine /var/www/
sudo rm -r AppInEngine ls current_file_backup
chmod -R 777 /var/www/AppInEngine
sudo service apache2 start > /dev/null
exit
fi
nodejs server
var express = require('express');
var app = express();
var exec = require('child_process').exec, child;
app.listen(4105);
app.get('/updateEngin',function (req, res){
var testscript = exec('sh test.sh /var/www/update/serviceupdate');
testscript.stdout.on('data', function(data){
console.log(data);
res.json(data);
});
});
Output when running the script alone in a terminal (./test.sh):
{"status":"true", "message":"service update success"}
Output when running it using nodejs:
{"status":"false", "message":"error occerd while cloning git repository "}
{"status":"true", "message":"service update success"}
Set a timeout on the nodejs server and the issue will be fixed:
var express = require('express');
var app = express();
var timeout = require('connect-timeout');
var exec = require('child_process').exec, child;
app.listen(4105);
app.get('/updateEngin', timeout('160s'), function (req, res){
var testscript = exec('sh test.sh /var/www/update/serviceupdate');
testscript.stdout.on('data', function(data){
console.log(data);
res.json(data);
});
});
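Separately from the timeout, note that res.json() above runs once per 'data' chunk, and the output shows the script can emit more than one line when run through Node. A sketch that buffers stdout and answers once when the script exits (same route and timeout, names unchanged) could look like this:
var express = require('express');
var timeout = require('connect-timeout');
var exec = require('child_process').exec;

var app = express();
app.listen(4105);

app.get('/updateEngin', timeout('160s'), function (req, res) {
  var testscript = exec('sh test.sh /var/www/update/serviceupdate');
  var output = '';

  // Collect everything the script prints instead of replying per chunk.
  testscript.stdout.on('data', function (data) {
    console.log(data);
    output += data;
  });

  // Reply exactly once, after the script has finished.
  testscript.on('close', function (code) {
    res.json({ exitCode: code, output: output });
  });
});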