I am using the Hyperledger fabric-samples network. I changed the configuration a bit and added a new org to it. I am using the balance-transfer use case, and when I try to enroll a user I get the following error:
Request I sent:
curl -s -X POST http://localhost:4000/users -H "content-type: application/x-www-form-urlencoded" -d 'username=Jim&orgName=Org3'
Error I am getting:
{"success":false,"message":"failed Error: Common connection profile is missing this client's organization and mspid"}```
Log of my Node app:
[2019-10-19 13:24:57.682] [DEBUG] SampleWebApp - ------>>>>>> new request for /users
[2019-10-19 13:24:57.682] [DEBUG] SampleWebApp - End point : /users
[2019-10-19 13:24:57.682] [DEBUG] SampleWebApp - User name : Jim
[2019-10-19 13:24:57.682] [DEBUG] SampleWebApp - Org name : Org3
[2019-10-19 13:24:57.683] [DEBUG] Helper - getClientForOrg - ****** START Org3 undefined
[2019-10-19 13:24:57.690] [DEBUG] Helper - [NetworkConfig101.js]: constructor, network_config: {"name":"balance-transfer","x-type":"hlfv1","description":"Balance Transfer Network","version":"1.0","channels":{"mychannel":{"orderers":["orderer.example.com"],"peers":{"peer0.org1.example.com":{"endorsingPeer":true,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":true},"peer1.org1.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer2.org1.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer0.org2.example.com":{"endorsingPeer":true,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":true},"peer1.org2.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer2.org2.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer0.org3.example.com":{"endorsingPeer":true,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":true},"peer1.org3.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer2.org3.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer0.org4.example.com":{"endorsingPeer":true,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":true},"peer1.org4.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer2.org4.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer0.org5.example.com":{"endorsingPeer":true,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":true},"peer1.org5.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer2.org5.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer0.org6.example.com":{"endorsingPeer":true,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":true},"peer1.org6.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false},"peer2.org6.example.com":{"endorsingPeer":false,"chaincodeQuery":true,"ledgerQuery":true,"eventSource":false}},"chaincodes":["mycc:v0"]}},"organizations":{"Org1":{"mspid":"Org1MSP","peers":["peer0.org1.example.com","peer1.org1.example.com","peer2.org1.example.com"],"certificateAuthorities":["ca-org1"],"adminPrivateKey":{"path":"artifacts/channel/crypto-config/peerOrganizations/org1.example.com/users/Admin#org1.example.com/msp/keystore/c4179a68cc1f71e51919f7541be599ec0d2924426b4e64159fbf3fcaec419463_sk"},"signedCert":{"path":"artifacts/channel/crypto-config/peerOrganizations/org1.example.com/users/Admin#org1.example.com/msp/signcerts/Admin#org1.example.com-cert.pem"}},"Org2":{"mspid":"Org2MSP","peers":["peer0.org2.example.com","peer1.org2.example.com","peer2.org2.example.com"],"certificateAuthorities":["ca-org2"],"adminPrivateKey":{"path":"artifacts/channel/crypto-config/peerOrganizations/org2.example.com/users/Admin#org2.example.com/msp/keystore/eff846bd66dc8801f1979fa40a4fe238f5b6a5e0eda2ae052d3383606d508485_sk"},"signedCert":{"path":"artifacts/channel/crypto-config/peerOrganizations/org2.example.com/users/Admin#org2.example.com/msp/signcerts/Admin#org2.example.com-cert.pem"}},"Org3":{"mspid":"Org3MSP","peers":["peer0.org3.example.com","peer1.org3.example.com","peer2.org3.example.com"],"certificateAuthorities":["ca-org3"],"adminPrivateKey":{"path":"artifacts/channel/crypto-config/peerOrganizations/org3.examp
le.com/users/Admin#org3.example.com/msp/keystore/9f40b162c33476c40b521d0e12f840429dbefcfcec097b6aa256b398f0910dea_sk"},"signedCert":{"path":"artifacts/channel/crypto-config/peerOrganizations/org3.example.com/users/Admin#org3.example.com/msp/signcerts/Admin#org3.example.com-cert.pem"}},"Org4":{"mspid":"Org4MSP","peers":["peer0.org4.example.com","peer1.org4.example.com","peer2.org4.example.com"],"certificateAuthorities":["ca-org4"],"adminPrivateKey":{"path":"artifacts/channel/crypto-config/peerOrganizations/org4.example.com/users/Admin#org4.example.com/msp/keystore/69b52012f2b133b1564dd22248ca7ca47895a433a6e23828db17ed9abb306e6c_sk"},"signedCert":{"path":"artifacts/channel/crypto-config/peerOrganizations/org4.example.com/users/Admin#org4.example.com/msp/signcerts/Admin#org4.example.com-cert.pem"}},"Org5":{"mspid":"Org5MSP","peers":["peer0.org5.example.com","peer1.org5.example.com","peer2.org5.example.com"],"certificateAuthorities":["ca-org5"],"adminPrivateKey":{"path":"artifacts/channel/crypto-config/peerOrganizations/org5.example.com/users/Admin#org5.example.com/msp/keystore/53237c803abb8d84a06bb6554289f0a8e1512de26c9778d84d26c8415c7ba242_sk"},"signedCert":{"path":"artifacts/channel/crypto-config/peerOrganizations/org5.example.com/users/Admin#org5.example.com/msp/signcerts/Admin#org5.example.com-cert.pem"}},"Org6":{"mspid":"Org6MSP","peers":["peer0.org6.example.com","peer1.org6.example.com","peer2.org6.example.com"],"certificateAuthorities":["ca-org6"],"adminPrivateKey":{"path":"artifacts/channel/crypto-config/peerOrganizations/org6.example.com/users/Admin#org6.example.com/msp/keystore/b46024bc7e730faa0427dff247474aef72f126fa4937a556bdc2d9c89f85e8fa_sk"},"signedCert":{"path":"artifacts/channel/crypto-config/peerOrganizations/org6.example.com/users/Admin#org6.example.com/msp/signcerts/Admin#org6.example.com-cert.pem"}}},"orderers":{"orderer.example.com":{"url":"grpcs://localhost:7050","grpcOptions":{"ssl-target-name-override":"orderer.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/ca.crt"}}},"peers":{"peer0.org1.example.com":{"url":"grpcs://localhost:7051","grpcOptions":{"ssl-target-name-override":"peer0.org1.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/ca.crt"}},"peer1.org1.example.com":{"url":"grpcs://localhost:7056","grpcOptions":{"ssl-target-name-override":"peer1.org1.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/ca.crt"}},"peer2.org1.example.com":{"url":"grpcs://localhost:7062","grpcOptions":{"ssl-target-name-override":"peer2.org1.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org1.example.com/peers/peer2.org1.example.com/tls/ca.crt"}},"peer0.org2.example.com":{"url":"grpcs://localhost:8051","grpcOptions":{"ssl-target-name-override":"peer0.org2.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/ca.crt"}},"peer1.org2.example.com":{"url":"grpcs://localhost:8056","eventUrl":"grpcs://localhost:8058","grpcOptions":{"ssl-target-name-override":"peer1.org2.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/ca.crt"}},"peer2.org2.example.com":{"url":"grpcs://localhost:8062","grpcOptions":{"ssl-target-name-override":"peer2.org2.ex
ample.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org2.example.com/peers/peer2.org2.example.com/tls/ca.crt"}},"peer0.org3.example.com":{"url":"grpcs://localhost:9051","grpcOptions":{"ssl-target-name-override":"peer0.org3.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/ca.crt"}},"peer1.org3.example.com":{"url":"grpcs://localhost:9056","grpcOptions":{"ssl-target-name-override":"peer1.org3.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org3.example.com/peers/peer1.org3.example.com/tls/ca.crt"}},"peer2.org3.example.com":{"url":"grpcs://localhost:9062","grpcOptions":{"ssl-target-name-override":"peer2.org3.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org3.example.com/peers/peer2.org3.example.com/tls/ca.crt"}},"peer0.org4.example.com":{"url":"grpcs://localhost:10051","grpcOptions":{"ssl-target-name-override":"peer0.org4.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org4.example.com/peers/peer0.org4.example.com/tls/ca.crt"}},"peer1.org4.example.com":{"url":"grpcs://localhost:10056","grpcOptions":{"ssl-target-name-override":"peer1.org4.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org4.example.com/peers/peer1.org4.example.com/tls/ca.crt"}},"peer2.org4.example.com":{"url":"grpcs://localhost:10062","grpcOptions":{"ssl-target-name-override":"peer2.org4.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org4.example.com/peers/peer2.org4.example.com/tls/ca.crt"}},"peer0.org5.example.com":{"url":"grpcs://localhost:11051","grpcOptions":{"ssl-target-name-override":"peer0.org5.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org5.example.com/peers/peer0.org5.example.com/tls/ca.crt"}},"peer1.org5.example.com":{"url":"grpcs://localhost:11056","grpcOptions":{"ssl-target-name-override":"peer1.org5.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org5.example.com/peers/peer1.org5.example.com/tls/ca.crt"}},"peer2.org5.example.com":{"url":"grpcs://localhost:11062","grpcOptions":{"ssl-target-name-override":"peer2.org5.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org5.example.com/peers/peer2.org5.example.com/tls/ca.crt"}},"peer0.org6.example.com":{"url":"grpcs://localhost:12051","grpcOptions":{"ssl-target-name-override":"peer0.org6.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org6.example.com/peers/peer0.org6.example.com/tls/ca.crt"}},"peer1.org6.example.com":{"url":"grpcs://localhost:12056","grpcOptions":{"ssl-target-name-override":"peer1.org6.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org6.example.com/peers/peer1.org6.example.com/tls/ca.crt"}},"peer2.org6.example.com":{"url":"grpcs://localhost:12062","grpcOptions":{"ssl-target-name-override":"peer2.org6.example.com"},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org6.example.com/peers/peer2.org6.example.com/tls/ca.crt"}}},"certificateAuthorities":{"ca-org1":{"url":"https://localhost:7054","httpOptions":{"verify":false},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org1.example.com/ca/ca.org1.example.com-cert.pem"},"registrar":[{"enrollId":"admin","enrollSecret":"adminpw"}],"caName":"ca-org1"},"ca-org2":{"url
":"https://localhost:8054","httpOptions":{"verify":false},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org2.example.com/ca/ca.org2.example.com-cert.pem"},"registrar":[{"enrollId":"admin","enrollSecret":"adminpw"}],"caName":"ca-org2"},"ca-org3":{"url":"https://localhost:9054","httpOptions":{"verify":false},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org3.example.com/ca/ca.org3.example.com-cert.pem"},"registrar":[{"enrollId":"admin","enrollSecret":"adminpw"}],"caName":"ca-org3"},"ca-org4":{"url":"https://localhost:10054","httpOptions":{"verify":false},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org4.example.com/ca/ca.org4.example.com-cert.pem"},"registrar":[{"enrollId":"admin","enrollSecret":"adminpw"}],"caName":"ca-org4"},"ca-org5":{"url":"https://localhost:11054","httpOptions":{"verify":false},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org5.example.com/ca/ca.org5.example.com-cert.pem"},"registrar":[{"enrollId":"admin","enrollSecret":"adminpw"}],"caName":"ca-org5"},"ca-org6":{"url":"https://localhost:12054","httpOptions":{"verify":false},"tlsCACerts":{"path":"artifacts/channel/crypto-config/peerOrganizations/org6.example.com/ca/ca.org6.example.com-cert.pem"},"registrar":[{"enrollId":"admin","enrollSecret":"adminpw"}],"caName":"ca-org6"}}}
[2019-10-19 13:24:57.709] [DEBUG] Helper - [NetworkConfig101.js]: constructor, network_config: {"name":"balance-transfer-org3","x-type":"hlfv1","description":"Balance Transfer Network - client definition for org3","version":"1.0","client":{"organization":"org3","credentialStore":{"path":"./fabric-client-kv-org3","cryptoStore":{"path":"/tmp/fabric-client-kv-org3"},"wallet":"wallet-name"}}}
[2019-10-19 13:24:57.709] [DEBUG] Helper - [NetworkConfig101.js]: mergeSettings - additions start
[2019-10-19 13:24:57.709] [DEBUG] Helper - [NetworkConfig101.js]: getOrganization - name org3
[2019-10-19 13:24:57.709] [DEBUG] Helper - [NetworkConfig101.js]: getOrganization - name org3
[2019-10-19 13:24:57.710] [DEBUG] Helper - [FileKeyValueStore.js]: constructor { options:
{ path: '/home/srihari/hyperledger/fabric-samples/balance-transfer/fabric-client-kv-org3',
wallet: 'wallet-name',
cryptoStore: { path: '/tmp/fabric-client-kv-org3' } } }
[2019-10-19 13:24:57.713] [DEBUG] Helper - [crypto_ecdsa_aes]: Hash algorithm: SHA2, hash output size: 256
[2019-10-19 13:24:57.713] [DEBUG] Helper - [utils.CryptoKeyStore]: CryptoKeyStore, constructor - start
[2019-10-19 13:24:57.714] [DEBUG] Helper - [utils.CryptoKeyStore]: constructor, no super class specified, using config: fabric-client/lib/impl/FileKeyValueStore.js
[2019-10-19 13:24:57.714] [DEBUG] Helper - getClientForOrg - ****** END Org3 undefined
[2019-10-19 13:24:57.714] [DEBUG] Helper - Successfully initialized the credential stores
[2019-10-19 13:24:57.714] [DEBUG] Helper - [FileKeyValueStore.js]: getValue { key: 'Jim' }
[2019-10-19 13:24:57.715] [INFO] Helper - User Jim was not enrolled, so we will need an admin user object to register
[2019-10-19 13:24:57.715] [DEBUG] Helper - [FileKeyValueStore.js]: getValue { key: 'admin' }
[2019-10-19 13:24:57.716] [DEBUG] Helper - [NetworkConfig101.js]: getOrganization - name org3
[2019-10-19 13:24:57.717] [ERROR] Helper - Failed to get registered user: Jim with error: Error: Common connection profile is missing this client's organization and mspid
[2019-10-19 13:24:57.717] [DEBUG] SampleWebApp - -- returned from registering the username Jim for organization Org3
[2019-10-19 13:24:57.717] [DEBUG] SampleWebApp - Failed to register the username Jim for organization Org3 with::failed Error: Common connection profile is missing this client's organization and mspid
I appreciate any help.
I have looked into your repo and found several mistakes:
1) You have defined a total of 6 organizations in the network-config file, but you have created only 3 org files in the artifacts folder.
2) When you create connection-profile files for multiple organizations, you also have to register each profile's path in the file mentioned here, so that the org name is appended dynamically and the app can switch between orgs; see the sketch right after this list.
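For illustration, a minimal sketch of how those paths are typically registered in the sample's config.js; the Org3 setting name and the artifacts/org3.yaml filename are assumptions following the Org1/Org2 pattern, not something taken from your repo:

var path = require('path');
var hfc = require('fabric-client');

// Common connection profile shared by every org
hfc.setConfigSetting('network-connection-profile-path',
  path.join(__dirname, 'artifacts', 'network-config.yaml'));

// One client-side profile per org; 'Org3-connection-profile-path' and
// artifacts/org3.yaml are assumed names following the Org1/Org2 pattern
hfc.setConfigSetting('Org1-connection-profile-path', path.join(__dirname, 'artifacts', 'org1.yaml'));
hfc.setConfigSetting('Org2-connection-profile-path', path.join(__dirname, 'artifacts', 'org2.yaml'));
hfc.setConfigSetting('Org3-connection-profile-path', path.join(__dirname, 'artifacts', 'org3.yaml'));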
In the artifacts/network-config.yaml file, under organizations:, you could try using org3 instead of Org3; a sketch of the relevant section follows below.
I still can't figure out why the o is lower case for org3 while the O is upper case for Org1 and Org2.
It just so happens that this works for me!
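For example, the relevant part of the organizations: section could look roughly like this (a minimal sketch: the key is spelled org3 so it matches the "organization": "org3" entry in your org3 client profile, and the values are the ones already in your posted config):

organizations:
  org3:                       # lower-case key matching the client profile's organization name
    mspid: Org3MSP
    peers:
      - peer0.org3.example.com
      - peer1.org3.example.com
      - peer2.org3.example.com
    certificateAuthorities:
      - ca-org3
    # adminPrivateKey and signedCert stay exactly as you already have them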
I'm getting a helm_release.istio_init: context deadline exceeded error when trying to install istio-init on my Kubernetes cluster.
I'm using helm provider version 0.9.1 with TLS enabled...
provider "helm" {
kubernetes {
host = "${var.cluster["endpoint"]}"
client_certificate = "${base64decode(var.cluster["client_certificate"])}"
client_key = "${base64decode(var.cluster["client_key"])}"
cluster_ca_certificate = "${base64decode(var.cluster["cluster_ca_certificate"])}"
}
install_tiller = "false"
tiller_image = "gcr.io/kubernetes-helm/tiller:v2.13.1"
service_account = "${kubernetes_service_account.terraform-tiller.metadata.0.name}"
namespace = "${kubernetes_service_account.terraform-tiller.metadata.0.namespace}"
enable_tls = "true"
insecure = "false"
client_key = "${file("${data.external.generate_certs.result["terraform_key"]}")}"
client_certificate = "${file("${data.external.generate_certs.result["terraform_cert"]}")}"
ca_certificate = "${file("${data.external.generate_certs.result["cert"]}")}"
}
# Installing istio
# Installing istio repository
# defining istio system namespace
resource "kubernetes_namespace" "istio_system" {
  metadata {
    name = "istio-system"
  }
}

data "helm_repository" "istio" {
  name = "istio"
  url  = "https://storage.googleapis.com/istio-release/releases/1.1.3/charts/"
}

# Installing istio-init resource
resource "helm_release" "istio_init" {
  depends_on = ["data.external.install_tiller"]
  namespace  = "istio-system"
  repository = "${data.helm_repository.istio.metadata.0.name}"
  name       = "istio-init"
  chart      = "istio-init"
}
I tried setting the insecure flag to true and it works, but that is not the idea.
My expected result was that istio-init would be installed, but instead I got context deadline exceeded.
This is the terraform plan output
terraform plan -out planning/plan.out
Refreshing Terraform state in-memory prior to plan...
The refreshed state will be used to calculate this plan, but will not be
persisted to local or remote state storage.
data.helm_repository.istio: Refreshing state...
------------------------------------------------------------------------
An execution plan has been generated and is shown below.
Resource actions are indicated with the following symbols:
+ create
Terraform will perform the following actions:
+ helm_release.istio_init
id: <computed>
chart: "istio-init"
disable_webhooks: "false"
force_update: "false"
metadata.#: <computed>
name: "istio-init"
namespace: "istio-system"
recreate_pods: "false"
repository: "istio"
reuse: "false"
reuse_values: "false"
status: "DEPLOYED"
timeout: "300"
verify: "false"
version: "1.1.3"
wait: "true"
Plan: 1 to add, 0 to change, 0 to destroy.
------------------------------------------------------------------------
This plan was saved to: planning/plan.out
To perform exactly these actions, run the following command to apply:
terraform apply "planning/plan.out"
This is the apply command output:
$ TF_LOG=DEBUG terraform apply "planning/plan.out"
2019/04/25 10:36:02 [INFO] Terraform version: 0.11.13
2019/04/25 10:36:02 [INFO] Go runtime version: go1.12
2019/04/25 10:36:02 [INFO] CLI args: []string{"/usr/local/Cellar/terraform/0.11.13/bin/terraform", "apply", "planning/plan.out"}
2019/04/25 10:36:02 [DEBUG] Attempting to open CLI config file: /Users/felipe/.terraformrc
2019/04/25 10:36:02 [DEBUG] File doesn't exist, but doesn't need to. Ignoring.
2019/04/25 10:36:02 [INFO] CLI command args: []string{"apply", "planning/plan.out"}
2019/04/25 10:36:02 [INFO] command: initializing local backend from plan (not set)
2019/04/25 10:36:02 [DEBUG] checking for provider in "."
2019/04/25 10:36:02 [DEBUG] checking for provider in "/usr/local/Cellar/terraform/0.11.13/bin"
2019/04/25 10:36:02 [DEBUG] checking for provider in ".terraform/plugins/darwin_amd64"
2019/04/25 10:36:02 [DEBUG] found provider "terraform-provider-helm_v0.9.1_x4"
2019/04/25 10:36:02 [DEBUG] found provider "terraform-provider-kubernetes_v1.6.2_x4"
2019/04/25 10:36:02 [DEBUG] found valid plugin: "helm", "0.9.1", "/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-helm_v0.9.1_x4"
2019/04/25 10:36:02 [DEBUG] found valid plugin: "kubernetes", "1.6.2", "/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-kubernetes_v1.6.2_x4"
2019/04/25 10:36:02 [DEBUG] checking for provisioner in "."
2019/04/25 10:36:02 [DEBUG] checking for provisioner in "/usr/local/Cellar/terraform/0.11.13/bin"
2019/04/25 10:36:02 [DEBUG] checking for provisioner in ".terraform/plugins/darwin_amd64"
2019/04/25 10:36:02 [INFO] command: backend initialized: *local.Local
2019/04/25 10:36:02 [DEBUG] checking for provider in "."
2019/04/25 10:36:02 [DEBUG] checking for provider in "/usr/local/Cellar/terraform/0.11.13/bin"
2019/04/25 10:36:02 [DEBUG] checking for provider in ".terraform/plugins/darwin_amd64"
2019/04/25 10:36:02 [DEBUG] found provider "terraform-provider-helm_v0.9.1_x4"
2019/04/25 10:36:02 [DEBUG] found provider "terraform-provider-kubernetes_v1.6.2_x4"
2019/04/25 10:36:02 [DEBUG] found valid plugin: "helm", "0.9.1", "/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-helm_v0.9.1_x4"
2019/04/25 10:36:02 [DEBUG] found valid plugin: "kubernetes", "1.6.2", "/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-kubernetes_v1.6.2_x4"
2019/04/25 10:36:02 [DEBUG] checking for provisioner in "."
2019/04/25 10:36:02 [DEBUG] checking for provisioner in "/usr/local/Cellar/terraform/0.11.13/bin"
2019/04/25 10:36:02 [DEBUG] checking for provisioner in ".terraform/plugins/darwin_amd64"
2019/04/25 10:36:02 [INFO] backend/local: starting Apply operation
2019/04/25 10:36:02 [INFO] terraform: building graph: GraphTypeApply
iresNew:false, Sensitive:false, Type:0x0}, "status":*terraform.ResourceAttrDiff{Old:"", New:"DEPLOYED", NewComputed:false, NewRemoved:false, NewExtra:interface {}(nil), RequiresNew:false, Sensitive:false, Type:0x0}, "timeout":*terraform.ResourceAttrDiff{Old:"", New:"300", NewComputed:false, NewRemoved:false, NewExtra:interface {}(nil), RequiresNew:false, Sensitive:false, Type:0x0}, "verify":*terraform.ResourceAttrDiff{Old:"", New:"false", NewComputed:false, NewRemoved:false, NewExtra:interface {}(nil), RequiresNew:false, Sensitive:false, Type:0x0}, "version":*terraform.ResourceAttrDiff{Old:"", New:"1.1.3", NewComputed:false, NewRemoved:false, NewExtra:interface {}(nil), RequiresNew:false, Sensitive:false, Type:0x0}, "wait":*terraform.ResourceAttrDiff{Old:"", New:"true", NewComputed:false, NewRemoved:false, NewExtra:interface {}(nil), RequiresNew:false, Sensitive:false, Type:0x0}}, Destroy:false, DestroyDeposed:false, DestroyTainted:false, Meta:map[string]interface {}(nil)}
2019/04/25 10:36:02 [DEBUG] Resource state not found for "helm_release.istio_init": helm_release.istio_init
2019/04/25 10:36:02 [TRACE] Graph after step *terraform.AttachStateTransformer:
helm_release.istio_init - *terraform.NodeApplyableResource
2019/04/25 10:36:02 [DEBUG] ReferenceTransformer: "helm_release.istio_init" references: []
2019/04/25 10:36:02 [DEBUG] ReferenceTransformer: "provider.helm" references: []
2019-04-25T10:36:02.359-0400 [DEBUG] plugin: starting plugin: path=/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-helm_v0.9.1_x4 args=[/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-helm_v0.9.1_x4]
2019-04-25T10:36:02.363-0400 [DEBUG] plugin: waiting for RPC address: path=/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-helm_v0.9.1_x4
2019-04-25T10:36:02.399-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: plugin address: timestamp=2019-04-25T10:36:02.398-0400 address=/var/folders/rx/lrb60s6929j3ldg6tlwtcf4m0000gn/T/plugin586870690 network=unix
2019-04-25T10:36:02.405-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:02 [DEBUG] TLS settings:
[OMITED]
2019-04-25T10:36:02.813-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:02 [DEBUG] Fetched istio/istio-init to /Users/felipe/.helm/cache/archive/istio-init-1.1.3.tgz
2019-04-25T10:36:03.097-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:03 [DEBUG] Fetched istio/istio-init to /Users/felipe/.helm/cache/archive/istio-init-1.1.3.tgz
2019/04/25 10:36:03 [DEBUG] apply: helm_release.istio_init: executing Apply
helm_release.istio_init: Creating...
chart: "" => "istio-init"
disable_webhooks: "" => "false"
force_update: "" => "false"
metadata.#: "" => "<computed>"
name: "" => "istio-init"
namespace: "" => "istio-system"
recreate_pods: "" => "false"
repository: "" => "istio"
reuse: "" => "false"
reuse_values: "" => "false"
status: "" => "DEPLOYED"
timeout: "" => "300"
verify: "" => "false"
version: "" => "1.1.3"
wait: "" => "true"
2019-04-25T10:36:04.632-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:04 [DEBUG] Created tunnel using local port: '50275'
2019-04-25T10:36:04.632-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:04 [DEBUG] Found TLS settings: configuring helm client with TLS
2019-04-25T10:36:09.634-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:09 [DEBUG] could not get release context deadline exceeded
2019/04/25 10:36:09 [ERROR] root: eval: *terraform.EvalApplyPost, err: 1 error(s) occurred:
* helm_release.istio_init: context deadline exceeded
2019/04/25 10:36:09 [ERROR] root: eval: *terraform.EvalSequence, err: 1 error(s) occurred:
* helm_release.istio_init: context deadline exceeded
2019/04/25 10:36:09 [DEBUG] plugin: waiting for all plugin processes to complete...
Error: Error applying plan:
1 error(s) occurred:
* helm_release.istio_init: 1 error(s) occurred:
* helm_release.istio_init: context deadline exceeded
Terraform does not automatically rollback in the face of errors.
Instead, your Terraform state file has been partially updated with
any resources that successfully completed. Please address the error
above and apply again to incrementally change your infrastructure.
2019-04-25T10:36:09.645-0400 [DEBUG] plugin.terraform-provider-helm_v0.9.1_x4: 2019/04/25 10:36:09 [ERR] plugin: plugin server: accept unix /var/folders/rx/lrb60s6929j3ldg6tlwtcf4m0000gn/T/plugin586870690: use of closed network connection
2019-04-25T10:36:09.648-0400 [DEBUG] plugin: plugin process exited: path=/Users/felipe/workspace/terraform-spec/helm/.terraform/plugins/darwin_amd64/terraform-provider-helm_v0.9.1_x4
I have had this error before and it usually means that I'm not running tiller.
I see you have set
install_tiller = "false"
which means you need to run it locally.
I run this:
tiller -storage secret
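For completeness, here is a minimal sketch of that setup, assuming the Helm v2 tiller binary is on your PATH; the default port and the HELM_HOST fallback for the provider's optional host argument are from memory, so double-check them against the provider docs:

# Run Tiller out of cluster; it listens on port 44134 by default
tiller -storage secret &

# Point clients at the local Tiller instance. As far as I recall, the helm
# provider's optional `host` argument falls back to this environment variable.
export HELM_HOST=127.0.0.1:44134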
I have created a proxy_match.yaml file as a Hiera data source file in the default Hiera data location. proxy_match.yaml has been added to the Hiera hierarchy, and the data is looked up in a profile class.
Where and what am I missing? I am not able to retrieve the Hiera data values, and the error shown below appears.
Here, proxy_match is the newly created environment.
Hiera file 1:
/etc/puppetlabs/code/environments/proxy_match/hiera.yaml
version: 5
defaults:
  # The default value for "datadir" is "data" under the same directory as the hiera.yaml
  # file (this file)
  # When specifying a datadir, make sure the directory exists.
  # See https://docs.puppet.com/puppet/latest/environments.html for further details on environments.
  # datadir: data
  # data_hash: yaml_data
hierarchy:
  - name: "environment specific yaml"
    path: "proxy_match.yaml"
  - name: "Per-node data (yaml version)"
    path: "nodes/%{::trusted.certname}.yaml"
  - name: "Other YAML hierarchy levels"
    paths:
      - "common.yaml"
proxy_match.yaml Hiera data source file
This is the YAML Hiera data source named proxy_match.yaml, as referenced in the hierarchy:
/etc/puppetlabs/code/environments/proxy_match/data/proxy-match.yaml
---
profiles::apache::servername: "taraserver.com"
profiles::apache::port: "80"
profiles::apache::docroot: "/var/www/tarahost"
Hiera lookup in the profile:
$servername = hiera('profiles::apache::servername',{})
$port       = hiera('profiles::apache::port',{})
$docroot    = hiera('profiles::apache::docroot',{})

class profile::apache {
  #configure apache
  include apache

  apache::vhost { $servername:
    port    => $port,
    docroot => $docroot,
  }
}
#ERROR:
Info: Retrieving pluginfacts
Info: Retrieving plugin
Info: Loading facts
Error: Could not retrieve catalog from remote server: Error 500 on SERVER: {"message":"Server Error: Evaluation Error: Error while evaluating a Resource Statement, Apache::Vhost[7fba80ae621c.domain.name]: parameter 'docroot' expects a value of type Boolean or String, got Undef at /etc/puppetlabs/code/environments/proxy_match/modules/profile/manifests/apache.pp:29 on node 94707b03ff05.domain.name","issue_kind":"RUNTIME_ERROR"}
Warning: Not using cache on failed catalog
Error: Could not retrieve catalog; skipping run
You are defining the variables outside the class definition. When Puppet loads that class, those lines you have before the class are ignored.
What you should have in your profile class is:
class profiles::apache (
  String  $servername,
  Integer $port,
  String  $docroot,
) {
  include apache

  apache::vhost { $servername:
    port    => $port,
    docroot => $docroot,
  }
}
Note that I used the automatic parameter lookup feature to set your variables, instead of explicit calls to the hiera function.
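If it helps, you can also check that those keys resolve for the node in the proxy_match environment before triggering an agent run; the node name below is simply the one from your error output:

# Show where (or whether) Hiera finds the value for this node and environment
puppet lookup profiles::apache::servername --environment proxy_match --node 94707b03ff05.domain.name --explain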