
ONAP aaf-sms pod not coming up - 10443: connect: connection refused #dmaap #helm #aaf #dublin


Raghu Ram
 

Hi all,
I am trying to deploy aaf and dmaap on a 3-node cluster (1 control-plane and 2 worker VMs).
aaf and dmaap are failing: as you can see below, 2 jobs (dev-aaf-aaf-sms-preload and dev-dmaap-dmaap-dr-prov-post-install) are not completing successfully.

I can't figure out what I am missing.

aarna@ubuntu:~/oom/kubernetes$ kubectl top nodes
NAME       CPU(cores)   CPU%   MEMORY(bytes)   MEMORY%   
dublin01   275m         2%     1422Mi          2%        
dublin02   1807m        15%    5121Mi          10%       
dublin03   1154m        9%     3327Mi          6%

I am using this YAML override file to deploy:
aarna@ubuntu:~/oom/kubernetes$ cat onap/resources/overrides/onap-simple.yaml 
# Copyright © 2019 Amdocs, Bell Canada
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
 
###################################################################
# This override file enables ONAP Application helm charts for the
# vFW use case.
###################################################################
cassandra:
  enabled: true
mariadb-galera:
  enabled: true
 
aaf:
  enabled: true
dmaap:
  enabled: true

The following is the command used to deploy:
aarna@ubuntu:~/oom/kubernetes$ nohup helm deploy dev local/onap -f onap/resources/overrides/onap-simple.yaml --namespace onap --timeout 1200 > ~/helm_install.log &
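
To follow progress I am tailing the install log and watching the release and pods come up (just generic monitoring commands, nothing chart-specific):

tail -f ~/helm_install.log
helm ls | grep dev-
kubectl get pods -n onap -o wide -w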
Status of the NFS folder as the deployment started:
aarna@ubuntu:~/oom/kubernetes$ ls -l /dockerdata-nfs/
total 4
drwxr-xr-x 3 root root 4096 Aug 11 12:48 dev-aaf
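
Since everything here is backed by this NFS share, I am also checking that the persistent volumes and claims actually get created and bound (a generic check, not specific to any one chart):

kubectl get pv
kubectl get pvc -n onap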
 
aarna@ubuntu:~/oom/kubernetes$ kubectl get pod -n onap -o wide
NAME                                                    READY   STATUS              RESTARTS   AGE   IP           NODE       NOMINATED NODE   READINESS GATES
dev-aaf-aaf-cass-76c8c584f-ss64m                        0/1     Running             0          37s   10.42.1.9    dublin02   <none>           <none>
dev-aaf-aaf-cm-6d7959c65-zfpmw                          0/1     Init:0/1            0          37s   <none>       dublin03   <none>           <none>
dev-aaf-aaf-fs-9898d569-45v69                           0/1     Init:0/1            0          37s   10.42.2.13   dublin03   <none>           <none>
dev-aaf-aaf-gui-586484467-xc92v                         0/1     Init:0/1            0          37s   10.42.2.15   dublin03   <none>           <none>
dev-aaf-aaf-locate-6867c7fb9d-k2b8h                     0/1     Init:0/1            0          37s   <none>       dublin03   <none>           <none>
dev-aaf-aaf-oauth-59cb84d56d-tstrv                      0/1     Init:0/1            0          37s   10.42.2.12   dublin03   <none>           <none>
dev-aaf-aaf-service-5c598d6fb-4zf45                     0/1     Init:0/1            0          37s   10.42.1.7    dublin02   <none>           <none>
dev-aaf-aaf-sms-769cd9fd8b-g46cj                        0/1     Running             0          36s   10.42.1.6    dublin02   <none>           <none>
dev-aaf-aaf-sms-preload-g587s                           0/1     Init:0/1            0          37s   10.42.2.5    dublin03   <none>           <none>
dev-aaf-aaf-sms-quorumclient-0                          1/1     Running             0          37s   10.42.1.5    dublin02   <none>           <none>
dev-aaf-aaf-sms-quorumclient-1                          0/1     ContainerCreating   0          31s   <none>       dublin03   <none>           <none>
dev-aaf-aaf-sms-vault-0                                 1/2     Error               2          37s   10.42.2.4    dublin03   <none>           <none>
dev-aaf-aaf-sshsm-distcenter-786lw                      0/1     Completed           0          37s   10.42.2.7    dublin03   <none>           <none>
dev-aaf-aaf-sshsm-testca-mrs6b                          0/1     Init:0/1            0          37s   10.42.2.6    dublin03   <none>           <none>
dev-cassandra-cassandra-0                               0/1     Running             0          36s   10.42.1.10   dublin02   <none>           <none>
dev-dmaap-dbc-pg-0                                      0/1     Running             0          33s   10.42.1.12   dublin02   <none>           <none>
dev-dmaap-dbc-pgpool-5fb9d89d5c-f24dr                   0/1     Running             0          34s   10.42.2.14   dublin03   <none>           <none>
dev-dmaap-dbc-pgpool-5fb9d89d5c-gm76k                   0/1     Running             0          34s   10.42.1.8    dublin02   <none>           <none>
dev-dmaap-dmaap-bc-f4c5ff9fc-brkr7                      0/1     Init:0/2            0          34s   10.42.1.11   dublin02   <none>           <none>
dev-dmaap-dmaap-bc-post-install-z2kw9                   1/1     Running             0          33s   10.42.1.18   dublin02   <none>           <none>
dev-dmaap-dmaap-dr-db-0                                 0/1     Init:0/1            0          33s   <none>       dublin02   <none>           <none>
dev-dmaap-dmaap-dr-node-0                               0/2     Init:0/2            0          33s   <none>       dublin03   <none>           <none>
dev-dmaap-dmaap-dr-prov-8677d4cbd6-w8vlg                0/2     Init:0/1            0          34s   10.42.2.11   dublin03   <none>           <none>
dev-dmaap-message-router-0                              0/1     Init:0/1            0          33s   <none>       dublin03   <none>           <none>
dev-dmaap-message-router-kafka-0                        0/1     Init:1/3            0          33s   10.42.2.8    dublin03   <none>           <none>
dev-dmaap-message-router-kafka-1                        0/1     Init:1/3            0          33s   10.42.1.14   dublin02   <none>           <none>
dev-dmaap-message-router-kafka-2                        0/1     Init:0/3            0          33s   <none>       dublin02   <none>           <none>
dev-dmaap-message-router-mirrormaker-6fd88d5bc4-k8fn2   0/1     Init:0/2            0          34s   <none>       dublin02   <none>           <none>
dev-dmaap-message-router-zookeeper-0                    0/1     Init:0/1            0          33s   <none>       dublin02   <none>           <none>
dev-dmaap-message-router-zookeeper-1                    0/1     Running             0          33s   10.42.2.9    dublin03   <none>           <none>
dev-dmaap-message-router-zookeeper-2                    0/1     Init:0/1            0          33s   <none>       dublin02   <none>           <none>


aarna@ubuntu:~/oom/kubernetes$ kubectl get jobs -n onap
NAME                                    COMPLETIONS   DURATION   AGE
dev-aaf-aaf-sms-preload                 0/1           20m        20m
dev-aaf-aaf-sshsm-distcenter            1/1           30s        20m
dev-aaf-aaf-sshsm-testca                1/1           62s        20m
dev-dmaap-dmaap-bc-post-install         1/1           3m         20m
dev-dmaap-dmaap-dr-prov-post-install    0/1           16m        16m
dev-dmaap-message-router-post-install   1/1           51s        17m
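
The two incomplete jobs can be chased through their pods; for example (names taken from the listings above):

kubectl describe pod -n onap dev-aaf-aaf-sms-preload-g587s
kubectl describe job -n onap dev-dmaap-dmaap-dr-prov-post-install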
 
aarna@ubuntu:~/oom/kubernetes$ kubectl get pod -n onap -o=wide | grep -v 'Running\|Completed'
NAME                                                    READY   STATUS             RESTARTS   AGE     IP           NODE       NOMINATED NODE   READINESS GATES
dev-aaf-aaf-sms-769cd9fd8b-g46cj                        0/1     CrashLoopBackOff   6          11m     10.42.1.6    dublin02   <none>           <none>
dev-aaf-aaf-sms-preload-g587s                           0/1     Init:0/1           1          11m     10.42.2.5    dublin03   <none>           <none>
dev-aaf-aaf-sms-vault-0                                 1/2     CrashLoopBackOff   7          11m     10.42.2.4    dublin03   <none>           <none>
dev-dmaap-dmaap-dr-node-0                               0/2     Init:0/2           1          11m     10.42.2.18   dublin03   <none>           <none>
dev-dmaap-dmaap-dr-prov-8677d4cbd6-w8vlg                0/2     Init:0/1           1          11m     10.42.2.11   dublin03   <none>           <none>
 
aarna@ubuntu:~/oom/kubernetes$ kubectl describe pod -n onap dev-aaf-aaf-sms-769cd9fd8b-g46cj
Name:               dev-aaf-aaf-sms-769cd9fd8b-g46cj
Namespace:          onap
Priority:           0
PriorityClassName:  <none>
Node:               dublin02/192.168.122.222
Start Time:         Sun, 11 Aug 2019 12:48:54 +0000
Labels:             app=aaf-sms
                    pod-template-hash=769cd9fd8b
                    release=dev-aaf
Annotations:        cni.projectcalico.org/podIP: 10.42.1.6/32
Status:             Running
IP:                 10.42.1.6
Controlled By:      ReplicaSet/dev-aaf-aaf-sms-769cd9fd8b
Init Containers:
  aaf-sms-readiness:
    Container ID:  docker://a55803af1408ebdd132e7356848f77c7120608888f5c4523a1f9672854370637
    Image:         oomk8s/readiness-check:2.0.2
    Image ID:      docker://sha256:cdfde4f65367407ebe257ca5b775c32668cd2a54679992ac81bf14f1c477189c
    Port:          <none>
    Host Port:     <none>
    Command:
      /root/ready.py
    Args:
      --container-name
      aaf-sms-vault
      --container-name
      aaf-sms-vault-backend
    State:          Terminated
      Reason:       Completed
      Exit Code:    0
      Started:      Sun, 11 Aug 2019 12:48:56 +0000
      Finished:     Sun, 11 Aug 2019 12:49:19 +0000
    Ready:          True
    Restart Count:  0
    Environment:
      NAMESPACE:  onap (v1:metadata.namespace)
    Mounts:
      /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
Containers:
  aaf-sms:
    Container ID:  docker://5bdba578eb70b3f9781a8f6c3183ef12851be6b63ec980e2700304ebca3accf0
    Image:         nexus3.onap.org:10001/onap/aaf/sms:4.0.1
    Image ID:      docker://sha256:2acdc7be71c2864e46b82cf978bb4716783e52eea2d54c0e72373fd5d832806a
    Port:          10443/TCP
    Host Port:     0/TCP
    Command:
      /sms/bin/sms
    State:          Waiting
      Reason:       CrashLoopBackOff
    Last State:     Terminated
      Reason:       Error
      Exit Code:    2
      Started:      Sun, 11 Aug 2019 12:58:30 +0000
      Finished:     Sun, 11 Aug 2019 12:59:59 +0000
    Ready:          False
    Restart Count:  6
    Liveness:       http-get https://:10443/v1/sms/quorum/status delay=10s timeout=1s period=30s #success=1 #failure=3
    Readiness:      http-get https://:10443/v1/sms/quorum/status delay=10s timeout=1s period=30s #success=1 #failure=3
    Environment:    <none>
    Mounts:
      /etc/localtime from localtime (ro)
      /sms/auth from dev-aaf-aaf-sms-auth (rw)
      /sms/smsconfig.json from aaf-sms (rw)
      /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
Conditions:
  Type              Status
  Initialized       True 
  Ready             False 
  ContainersReady   False 
  PodScheduled      True 
Volumes:
  localtime:
    Type:          HostPath (bare host directory volume)
    Path:          /etc/localtime
    HostPathType:  
  aaf-sms:
    Type:      ConfigMap (a volume populated by a ConfigMap)
    Name:      dev-aaf-aaf-sms
    Optional:  false
  dev-aaf-aaf-sms-auth:
    Type:       PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
    ClaimName:  dev-aaf-aaf-sms
    ReadOnly:   false
  default-token-67qsf:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  default-token-67qsf
    Optional:    false
QoS Class:       BestEffort
Node-Selectors:  <none>
Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                 node.kubernetes.io/unreachable:NoExecute for 300s
Events:
  Type     Reason     Age                  From               Message
  ----     ------     ----                 ----               -------
  Normal   Scheduled  13m                  default-scheduler  Successfully assigned onap/dev-aaf-aaf-sms-769cd9fd8b-g46cj to dublin02
  Normal   Pulled     13m                  kubelet, dublin02  Container image "oomk8s/readiness-check:2.0.2" already present on machine
  Normal   Created    13m                  kubelet, dublin02  Created container
  Normal   Started    13m                  kubelet, dublin02  Started container
  Normal   Started    11m (x2 over 12m)    kubelet, dublin02  Started container
  Warning  Unhealthy  9m49s (x6 over 12m)  kubelet, dublin02  Readiness probe failed: Get https://10.42.1.6:10443/v1/sms/quorum/status: dial tcp 10.42.1.6:10443: connect: connection refused
  Normal   Pulled     9m31s (x3 over 12m)  kubelet, dublin02  Container image "nexus3.onap.org:10001/onap/aaf/sms:4.0.1" already present on machine
  Normal   Created    9m31s (x3 over 12m)  kubelet, dublin02  Created container
  Normal   Killing    9m31s (x2 over 11m)  kubelet, dublin02  Killing container with id docker://aaf-sms:Container failed liveness probe.. Container will be killed and recreated.
  Warning  Unhealthy  3m1s (x19 over 12m)  kubelet, dublin02  Liveness probe failed: Get https://10.42.1.6:10443/v1/sms/quorum/status: dial tcp 10.42.1.6:10443: connect: connection refused
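
Since the probes only report connection refused on 10443, I assume the sms container's own log should show why it exits with code 2 (--previous pulls the log of the last crashed instance):

kubectl logs -n onap dev-aaf-aaf-sms-769cd9fd8b-g46cj --previous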
 
 
aarna@ubuntu:~/oom/kubernetes$ kubectl describe pod -n onap dev-aaf-aaf-sms-vault-0
Name:               dev-aaf-aaf-sms-vault-0
Namespace:          onap
Priority:           0
PriorityClassName:  <none>
Node:               dublin03/192.168.122.223
Start Time:         Sun, 11 Aug 2019 12:48:53 +0000
Labels:             app=aaf-sms-vault
                    controller-revision-hash=dev-aaf-aaf-sms-vault-7d74d6644b
                    release=dev-aaf
                    statefulset.kubernetes.io/pod-name=dev-aaf-aaf-sms-vault-0
Annotations:        cni.projectcalico.org/podIP: 10.42.2.4/32
Status:             Running
IP:                 10.42.2.4
Controlled By:      StatefulSet/dev-aaf-aaf-sms-vault
Containers:
  aaf-sms-vault:
    Container ID:  docker://dfa585bfb301d63fc9d6b14aff22057779fa6b7b2482355a2f67b81e28b7dfd6
    Image:         nexus3.onap.org:10001/library/vault:0.10.0
    Image ID:      docker://sha256:2d9787f3b75218942989a01cc09c9fef233f216aeddfa621f2ef0e5e99de105f
    Port:          8200/TCP
    Host Port:     0/TCP
    Command:
      vault
      server
      -config
      /vault/config/config.json
    State:          Running
      Started:      Sun, 11 Aug 2019 12:49:00 +0000
    Last State:     Terminated
      Reason:       Error
      Exit Code:    1
      Started:      Sun, 11 Aug 2019 12:48:57 +0000
      Finished:     Sun, 11 Aug 2019 12:48:57 +0000
    Ready:          True
    Restart Count:  1
    Environment:    <none>
    Mounts:
      /etc/localtime from localtime (ro)
      /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
      /vault/config/config.json from dev-aaf-aaf-sms-vault-vault (rw)
  aaf-sms-vault-backend:
    Container ID:  docker://0fc71be06f4dd9095155776065795a14f04b0aa48747e7109ad23df36d1eddfd
    Image:         nexus3.onap.org:10001/library/consul:1.0.6
    Image ID:      docker://sha256:5f4915f05e270fdb302322ac0f440e392c07d7e0ca154a2a4f28a781b57cabfc
    Port:          8500/TCP
    Host Port:     0/TCP
    Command:
      consul
      agent
      -server
      -client
      0.0.0.0
      -bootstrap-expect=1
      -config-file
      /consul/config/config.json
    State:          Waiting
      Reason:       CrashLoopBackOff
    Last State:     Terminated
      Reason:       Error
      Exit Code:    2
      Started:      Sun, 11 Aug 2019 12:55:50 +0000
      Finished:     Sun, 11 Aug 2019 12:55:57 +0000
    Ready:          False
    Restart Count:  6
    Environment:    <none>
    Mounts:
      /consul/config/config.json from dev-aaf-aaf-sms-vault-consulconfiguration (rw)
      /consul/data from dev-aaf-aaf-sms-vault-consuldata (rw)
      /etc/localtime from localtime (ro)
      /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
Conditions:
  Type              Status
  Initialized       True 
  Ready             False 
  ContainersReady   False 
  PodScheduled      True 
Volumes:
  dev-aaf-aaf-sms-vault-consuldata:
    Type:       PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
    ClaimName:  dev-aaf-aaf-sms-vault
    ReadOnly:   false
  dev-aaf-aaf-sms-vault-consulconfiguration:
    Type:      ConfigMap (a volume populated by a ConfigMap)
    Name:      dev-aaf-aaf-sms-vault-consul
    Optional:  false
  dev-aaf-aaf-sms-vault-vault:
    Type:      ConfigMap (a volume populated by a ConfigMap)
    Name:      dev-aaf-aaf-sms-vault-vault
    Optional:  false
  localtime:
    Type:          HostPath (bare host directory volume)
    Path:          /etc/localtime
    HostPathType:  
  default-token-67qsf:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  default-token-67qsf
    Optional:    false
QoS Class:       BestEffort
Node-Selectors:  <none>
Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                 node.kubernetes.io/unreachable:NoExecute for 300s
Events:
  Type     Reason     Age                  From               Message
  ----     ------     ----                 ----               -------
  Normal   Scheduled  10m                  default-scheduler  Successfully assigned onap/dev-aaf-aaf-sms-vault-0 to dublin03
  Normal   Pulled     10m (x2 over 10m)    kubelet, dublin03  Container image "nexus3.onap.org:10001/library/vault:0.10.0" already present on machine
  Normal   Created    10m (x2 over 10m)    kubelet, dublin03  Created container
  Normal   Started    10m (x2 over 10m)    kubelet, dublin03  Started container
  Normal   Pulled     9m31s (x4 over 10m)  kubelet, dublin03  Container image "nexus3.onap.org:10001/library/consul:1.0.6" already present on machine
  Normal   Created    9m30s (x4 over 10m)  kubelet, dublin03  Created container
  Normal   Started    9m29s (x4 over 10m)  kubelet, dublin03  Started container
  Warning  BackOff    48s (x41 over 10m)   kubelet, dublin03  Back-off restarting failed container
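
The consul backend is the container that keeps restarting (exit code 2), so its previous log and the state of its data directory on the NFS share look like the next things to check:

kubectl logs -n onap dev-aaf-aaf-sms-vault-0 -c aaf-sms-vault-backend --previous
ls -l /dockerdata-nfs/dev-aaf/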
 
 
  aarna@ubuntu:~/oom/kubernetes$ kubectl describe pod -n onap dev-dmaap-dmaap-dr-node-0
  Name:               dev-dmaap-dmaap-dr-node-0
  Namespace:          onap
  Priority:           0
  PriorityClassName:  <none>
  Node:               dublin03/192.168.122.223
  Start Time:         Sun, 11 Aug 2019 12:48:57 +0000
  Labels:             app=dmaap-dr-node
                      controller-revision-hash=dev-dmaap-dmaap-dr-node-5676874886
                      release=dev-dmaap
                      statefulset.kubernetes.io/pod-name=dev-dmaap-dmaap-dr-node-0
  Annotations:        cni.projectcalico.org/podIP: 10.42.2.18/32
  Status:             Pending
  IP:                 10.42.2.18
  Controlled By:      StatefulSet/dev-dmaap-dmaap-dr-node
  Init Containers:
    dmaap-dr-node-readiness:
      Container ID:  docker://a506ea5a446341665e4b982bc0f95e95e6e45c311d0f43d6f1fd117018fc76e9
      Image:         oomk8s/readiness-check:2.0.0
      Image ID:      docker://sha256:867cb038e1d2445a6e5aedc3b5f970dacc8249ab119d6c2e088e10df886ff51f
      Port:          <none>
      Host Port:     <none>
      Command:
        /root/ready.py
      Args:
        --container-name
        dmaap-dr-prov
      State:          Running
        Started:      Sun, 11 Aug 2019 12:59:33 +0000
      Last State:     Terminated
        Reason:       Error
        Exit Code:    1
        Started:      Sun, 11 Aug 2019 12:49:26 +0000
        Finished:     Sun, 11 Aug 2019 12:59:31 +0000
      Ready:          False
      Restart Count:  1
      Environment:
        NAMESPACE:  onap (v1:metadata.namespace)
      Mounts:
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
    dmaap-dr-node-permission-fixer:
      Container ID:  
      Image:         docker.io/busybox:1.30
      Image ID:      
      Port:          <none>
      Host Port:     <none>
      Command:
        chown
        -Rf
        1000:1001
        /opt/app/datartr
      State:          Waiting
        Reason:       PodInitializing
      Ready:          False
      Restart Count:  0
      Environment:    <none>
      Mounts:
        /opt/app/datartr/logs from dev-dmaap-dmaap-dr-node-event-logs-pvc (rw)
        /opt/app/datartr/spool from dev-dmaap-dmaap-dr-node-spool-data-pvc (rw)
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
  Containers:
    dmaap-dr-node:
      Container ID:   
      Image:          nexus3.onap.org:10001/onap/dmaap/datarouter-node:2.1.0
      Image ID:       
      Ports:          8080/TCP, 8443/TCP
      Host Ports:     0/TCP, 0/TCP
      State:          Waiting
        Reason:       PodInitializing
      Ready:          False
      Restart Count:  0
      Liveness:       tcp-socket :8080 delay=30s timeout=1s period=10s #success=1 #failure=3
      Readiness:      tcp-socket :8080 delay=30s timeout=1s period=10s #success=1 #failure=3
      Environment:    <none>
      Mounts:
        /etc/localtime from localtime (rw)
        /opt/app/datartr/etc/createFeed.sh from dev-dmaap-dmaap-dr-node-create-feed-config (rw)
        /opt/app/datartr/etc/dedicatedFeed.json from dev-dmaap-dmaap-dr-node-create-feed-config (rw)
        /opt/app/datartr/etc/drNodeCadi.properties from dev-dmaap-dmaap-dr-node-config (rw)
        /opt/app/datartr/etc/node.properties from dev-dmaap-dmaap-dr-node-config (rw)
        /opt/app/datartr/logs from dev-dmaap-dmaap-dr-node-event-logs-pvc (rw)
        /opt/app/datartr/spool from dev-dmaap-dmaap-dr-node-spool-data-pvc (rw)
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
    dmaap-dr-node-filebeat-onap:
      Container ID:   
      Image:          docker.elastic.co/beats/filebeat:5.5.0
      Image ID:       
      Port:           <none>
      Host Port:      <none>
      State:          Waiting
        Reason:       PodInitializing
      Ready:          False
      Restart Count:  0
      Environment:    <none>
      Mounts:
        /usr/share/filebeat/data from dev-dmaap-dmaap-dr-node-data-filebeat (rw)
        /usr/share/filebeat/filebeat.yml from dev-dmaap-dmaap-dr-node-filebeat-conf (rw)
        /var/log/onap/datarouter-node from dev-dmaap-dmaap-dr-node-event-logs-pvc (rw)
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
  Conditions:
    Type              Status
    Initialized       False 
    Ready             False 
    ContainersReady   False 
    PodScheduled      True 
  Volumes:
    dev-dmaap-dmaap-dr-node-spool-data-pvc:
      Type:       PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
      ClaimName:  dev-dmaap-dmaap-dr-node-spool-data-pvc-dev-dmaap-dmaap-dr-node-0
      ReadOnly:   false
    dev-dmaap-dmaap-dr-node-event-logs-pvc:
      Type:       PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
      ClaimName:  dev-dmaap-dmaap-dr-node-event-logs-pvc-dev-dmaap-dmaap-dr-node-0
      ReadOnly:   false
    localtime:
      Type:          HostPath (bare host directory volume)
      Path:          /etc/localtime
      HostPathType:  
    dev-dmaap-dmaap-dr-node-create-feed-config:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-dr-node-create-feed-configmap
      Optional:  false
    dev-dmaap-dmaap-dr-node-config:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-dr-node-configmap
      Optional:  false
    dev-dmaap-dmaap-dr-node-log-conf:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-dr-node-log
      Optional:  false
    dev-dmaap-dmaap-dr-node-filebeat-conf:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-filebeat-configmap
      Optional:  false
    dev-dmaap-dmaap-dr-node-data-filebeat:
      Type:    EmptyDir (a temporary directory that shares a pod's lifetime)
      Medium:  
    default-token-67qsf:
      Type:        Secret (a volume populated by a Secret)
      SecretName:  default-token-67qsf
      Optional:    false
  QoS Class:       BestEffort
  Node-Selectors:  <none>
  Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                   node.kubernetes.io/unreachable:NoExecute for 300s
  Events:
    Type     Reason            Age                  From               Message
    ----     ------            ----                 ----               -------
    Warning  FailedScheduling  13m (x4 over 13m)    default-scheduler  pod has unbound immediate PersistentVolumeClaims (repeated 2 times)
    Normal   Scheduled         13m                  default-scheduler  Successfully assigned onap/dev-dmaap-dmaap-dr-node-0 to dublin03
    Normal   Pulled            3m14s (x2 over 13m)  kubelet, dublin03  Container image "oomk8s/readiness-check:2.0.0" already present on machine
    Normal   Created           3m13s (x2 over 13m)  kubelet, dublin03  Created container
    Normal   Started           3m13s (x2 over 13m)  kubelet, dublin03  Started container
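
The FailedScheduling warning points at unbound PersistentVolumeClaims, so I am checking whether the dr-node claims bind (claim names taken from the Volumes section above):

kubectl get pvc -n onap | grep dr-node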
  aarna@ubuntu:~/oom/kubernetes$ kubectl describe pod -n onap dev-dmaap-dmaap-dr-prov-8677d4cbd6-w8vlg
  Name:               dev-dmaap-dmaap-dr-prov-8677d4cbd6-w8vlg
  Namespace:          onap
  Priority:           0
  PriorityClassName:  <none>
  Node:               dublin03/192.168.122.223
  Start Time:         Sun, 11 Aug 2019 12:48:57 +0000
  Labels:             app=dmaap-dr-prov
                      pod-template-hash=8677d4cbd6
                      release=dev-dmaap
  Annotations:        cni.projectcalico.org/podIP: 10.42.2.11/32
  Status:             Pending
  IP:                 10.42.2.11
  Controlled By:      ReplicaSet/dev-dmaap-dmaap-dr-prov-8677d4cbd6
  Init Containers:
    dmaap-dr-prov-readiness:
      Container ID:  docker://b9a01a41bb27d34727019c72071e7e8eae0d651064a9b77e65cdf1f8709eb417
      Image:         oomk8s/readiness-check:2.0.0
      Image ID:      docker://sha256:867cb038e1d2445a6e5aedc3b5f970dacc8249ab119d6c2e088e10df886ff51f
      Port:          <none>
      Host Port:     <none>
      Command:
        /root/ready.py
      Args:
        --container-name
        dmaap-dr-db
      State:          Running
        Started:      Sun, 11 Aug 2019 12:59:17 +0000
      Last State:     Terminated
        Reason:       Error
        Exit Code:    1
        Started:      Sun, 11 Aug 2019 12:49:11 +0000
        Finished:     Sun, 11 Aug 2019 12:59:16 +0000
      Ready:          False
      Restart Count:  1
      Environment:
        NAMESPACE:  onap (v1:metadata.namespace)
      Mounts:
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
  Containers:
    dmaap-dr-prov:
      Container ID:   
      Image:          nexus3.onap.org:10001/onap/dmaap/datarouter-prov:2.1.0
      Image ID:       
      Ports:          8080/TCP, 8443/TCP
      Host Ports:     0/TCP, 0/TCP
      State:          Waiting
        Reason:       PodInitializing
      Ready:          False
      Restart Count:  0
      Liveness:       tcp-socket :8080 delay=30s timeout=1s period=10s #success=1 #failure=3
      Readiness:      tcp-socket :8080 delay=30s timeout=1s period=10s #success=1 #failure=3
      Environment:    <none>
      Mounts:
        /etc/localtime from localtime (rw)
        /opt/app/datartr/etc/drProvCadi.properties from dev-dmaap-dmaap-dr-prov-config (rw)
        /opt/app/datartr/etc/provserver.properties from dev-dmaap-dmaap-dr-prov-config (rw)
        /opt/app/datartr/logs from dev-dmaap-dmaap-dr-prov-logs (rw)
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
    dmaap-dr-prov-filebeat-onap:
      Container ID:   
      Image:          docker.elastic.co/beats/filebeat:5.5.0
      Image ID:       
      Port:           <none>
      Host Port:      <none>
      State:          Waiting
        Reason:       PodInitializing
      Ready:          False
      Restart Count:  0
      Environment:    <none>
      Mounts:
        /usr/share/filebeat/data from dev-dmaap-dmaap-dr-prov-data-filebeat (rw)
        /usr/share/filebeat/filebeat.yml from dev-dmaap-dmaap-dr-prov-filebeat-conf (rw)
        /var/log/onap/datarouter-prov from dev-dmaap-dmaap-dr-prov-logs (rw)
        /var/run/secrets/kubernetes.io/serviceaccount from default-token-67qsf (ro)
  Conditions:
    Type              Status
    Initialized       False 
    Ready             False 
    ContainersReady   False 
    PodScheduled      True 
  Volumes:
    localtime:
      Type:          HostPath (bare host directory volume)
      Path:          /etc/localtime
      HostPathType:  
    dev-dmaap-dmaap-dr-prov-config:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-dr-prov-configmap
      Optional:  false
    dev-dmaap-dmaap-dr-prov-log-conf:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-dr-prov-log
      Optional:  false
    dev-dmaap-dmaap-dr-prov-filebeat-conf:
      Type:      ConfigMap (a volume populated by a ConfigMap)
      Name:      dev-dmaap-dmaap-filebeat-configmap
      Optional:  false
    dev-dmaap-dmaap-dr-prov-data-filebeat:
      Type:    EmptyDir (a temporary directory that shares a pod's lifetime)
      Medium:  
    dev-dmaap-dmaap-dr-prov-logs:
      Type:    EmptyDir (a temporary directory that shares a pod's lifetime)
      Medium:  
    default-token-67qsf:
      Type:        Secret (a volume populated by a Secret)
      SecretName:  default-token-67qsf
      Optional:    false
  QoS Class:       BestEffort
  Node-Selectors:  <none>
  Tolerations:     node.kubernetes.io/not-ready:NoExecute for 300s
                   node.kubernetes.io/unreachable:NoExecute for 300s
  Events:
    Type    Reason     Age                  From               Message
    ----    ------     ----                 ----               -------
    Normal  Scheduled  14m                  default-scheduler  Successfully assigned onap/dev-dmaap-dmaap-dr-prov-8677d4cbd6-w8vlg to dublin03
    Normal  Pulled     3m41s (x2 over 13m)  kubelet, dublin03  Container image "oomk8s/readiness-check:2.0.0" already present on machine
    Normal  Created    3m41s (x2 over 13m)  kubelet, dublin03  Created container
    Normal  Started    3m41s (x2 over 13m)  kubelet, dublin03  Started container
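
dmaap-dr-prov is blocked in its readiness init container waiting for dmaap-dr-db, and dev-dmaap-dmaap-dr-db-0 is itself stuck in Init:0/1 in the pod listing above, so that database pod is probably the one to look at next:

kubectl describe pod -n onap dev-dmaap-dmaap-dr-db-0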

Thanks
Raghu