SC describe output:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# kdsc sc vsan-d-policy
Name: vsan-d-policy
IsDefaultClass: No
Annotations: cns.vmware.com/StoragePoolTypeHint=cns.vmware.com/vsanD
Provisioner: csi.vsphere.vmware.com
Parameters: storagePolicyID=6536f0ca-88e8-4381-bba1-e5dd20367e6f
AllowVolumeExpansion: True
MountOptions: <none>
ReclaimPolicy: Delete
VolumeBindingMode: WaitForFirstConsumer
Events: <none>
Nodes in environment:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# k get nodes -L kubernetes.io/hostname
NAME STATUS ROLES AGE VERSION HOSTNAME
42122407b286669937e3701d5a691ac8 Ready control-plane,master 10d v1.21.0+vmware.1-wcp 42122407b286669937e3701d5a691ac8
42126ac4891cde8ba1b27a61eb1ffbb9 Ready control-plane,master 10d v1.21.0+vmware.1-wcp 42126ac4891cde8ba1b27a61eb1ffbb9
4212ff9a463edc5ee2e0e01c25d6ad1e Ready control-plane,master 10d v1.21.0+vmware.1-wcp 4212ff9a463edc5ee2e0e01c25d6ad1e
sc2-10-184-103-28.eng.vmware.com Ready agent 10d v1.21.0-sph-2f65065 sc2-10-184-103-28.eng.vmware.com
sc2-10-184-104-27.eng.vmware.com Ready agent 10d v1.21.0-sph-2f65065 sc2-10-184-104-27.eng.vmware.com
sc2-10-184-106-125.eng.vmware.com Ready agent 10d v1.21.0-sph-2f65065 sc2-10-184-106-125.eng.vmware.com
sc2-10-184-108-140.eng.vmware.com Ready agent 10d v1.21.0-sph-2f65065 sc2-10-184-108-140.eng.vmware.com
StoragePools in environment — 4 vSAN Direct datastores, one on each worker node:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# k get storagepool
NAME AGE
storagepool-nfs0-1 10d
storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0 28h
storagepool-vsand-10.184.104.27-mpx.vmhba0-c0-t2-l0 28h
storagepool-vsand-10.184.106.125-mpx.vmhba0-c0-t2-l0 28h
storagepool-vsand-10.184.108.140-mpx.vmhba0-c0-t2-l0 28h
storagepool-vsandatastore 10d
storagepool-vsandatastore-sc2-10-184-103-28.eng.vmware.com 10d
storagepool-vsandatastore-sc2-10-184-104-27.eng.vmware.com 10d
storagepool-vsandatastore-sc2-10-184-106-125.eng.vmware.com 10d
storagepool-vsandatastore-sc2-10-184-108-140.eng.vmware.com 10d
PVC definition, with the storagepool storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0 chosen for volume provisioning:
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: pvc-vsand
namespace: e2e-test-namespace
annotations:
volume.beta.kubernetes.io/storage-class: vsan-d-policy
failure-domain.beta.vmware.com/storagepool: storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 100Mi
PVC stays in Pending state until the Pod is created:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# kdsc pvc pvc-vsand -n e2e-test-namespace
Name: pvc-vsand
Namespace: e2e-test-namespace
StorageClass: vsan-d-policy
Status: Pending
Volume:
Labels: <none>
Annotations: failure-domain.beta.vmware.com/storagepool: storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0
volume.beta.kubernetes.io/storage-class: vsan-d-policy
Finalizers: [kubernetes.io/pvc-protection]
Capacity:
Access Modes:
VolumeMode: Filesystem
Used By: <none>
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal WaitForFirstConsumer 14s (x2 over 16s) persistentvolume-controller waiting for first consumer to be created before binding
Pod definition:
apiVersion: v1
kind: Pod
metadata:
name: block-pod-vsand
namespace: e2e-test-namespace
spec:
containers:
- name: test-container
image: gcr.io/google_containers/busybox:1.24
command: ["/bin/sh", "-c", "echo 'hello' > /mnt/volume1/index.html && chmod o+rX /mnt /mnt/volume1/index.html && while true ; do sleep 2 ; done"]
volumeMounts:
- name: test-volume
mountPath: /mnt/volume1
restartPolicy: Never
volumes:
- name: test-volume
persistentVolumeClaim:
claimName: pvc-vsand
Logs:
{"level":"info","time":"2021-07-23T20:33:07.41367598Z","caller":"wcp/controller.go:516","msg":"CreateVolume: called with args {Name:pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f CapacityRange:required_bytes:104857600 VolumeCapabilities:[mount:<fs_type:\"ext4\" > access_mode:<mode:SINGLE_NODE_WRITER > ] Parameters:map[storagePolicyID:6536f0ca-88e8-4381-bba1-e5dd20367e6f storagePool:storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0] Secrets:map[] VolumeContentSource:<nil>
AccessibilityRequirements:
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-108-140.eng.vmware.com\" > >
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"42122407b286669937e3701d5a691ac8\" > >
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"42126ac4891cde8ba1b27a61eb1ffbb9\" > >
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"4212ff9a463edc5ee2e0e01c25d6ad1e\" > >
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-103-28.eng.vmware.com\" > >
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-104-27.eng.vmware.com\" > >
requisite:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-106-125.eng.vmware.com\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-104-27.eng.vmware.com\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-106-125.eng.vmware.com\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-108-140.eng.vmware.com\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"42122407b286669937e3701d5a691ac8\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"42126ac4891cde8ba1b27a61eb1ffbb9\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"4212ff9a463edc5ee2e0e01c25d6ad1e\" > >
preferred:<segments:<key:\"kubernetes.io/hostname\" value:\"sc2-10-184-103-28.eng.vmware.com\" > > XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0}","TraceId":"b52df442-12e3-4be7-8168-ea89b445bc5a"}
Volume created on node sc2-10-184-103-28.eng.vmware.com:
{"level":"info","time":"2021-07-23T20:33:07.446373617Z","caller":"wcp/controller.go:348","msg":"Storage pool Accessible nodes for volume topology: [sc2-10-184-103-28.eng.vmware.com]","TraceId":"b52df442-12e3-4be7-8168-ea89b445bc5a"}
{"level":"info","time":"2021-07-23T20:33:07.461470205Z","caller":"wcp/controller.go:356","msg":"Will select datastore ds:///vmfs/volumes/60f9b3bd-9e733963-e32b-020068c3a6ae/ as per the provided storage pool storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0","TraceId":"b52df442-12e3-4be7-8168-ea89b445bc5a"}
{"level":"info","time":"2021-07-23T20:33:08.41471546Z","caller":"volume/manager.go:380","msg":"CreateVolume: VolumeName: \"pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f\", opId: \"5b667b75\"","TraceId":"b52df442-12e3-4be7-8168-ea89b445bc5a"}
{"level":"info","time":"2021-07-23T20:33:08.423566306Z","caller":"volume/util.go:318","msg":"Volume created successfully. VolumeName: \"pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f\", volumeID: \"e1efe3b2-5605-4f07-adb7-ca27b5c0c206\"","TraceId":"b52df442-12e3-4be7-8168-ea89b445bc5a"}
Describe outputs of PVC:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# kdsc pvc -n e2e-test-namespace pvc-vsand
Name: pvc-vsand
Namespace: e2e-test-namespace
StorageClass: vsan-d-policy
Status: Bound
Volume: pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f
Labels: <none>
Annotations: failure-domain.beta.vmware.com/storagepool: storagepool-vsand-10.184.103.28-mpx.vmhba0-c0-t2-l0
pv.kubernetes.io/bind-completed: yes
pv.kubernetes.io/bound-by-controller: yes
volume.beta.kubernetes.io/storage-class: vsan-d-policy
volume.beta.kubernetes.io/storage-provisioner: csi.vsphere.vmware.com
volume.kubernetes.io/selected-node: sc2-10-184-104-27.eng.vmware.com
Finalizers: [kubernetes.io/pvc-protection]
Capacity: 100Mi
Access Modes: RWO
VolumeMode: Filesystem
Used By: block-pod-vsand
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal WaitForFirstConsumer 66s (x5 over 117s) persistentvolume-controller waiting for first consumer to be created before binding
Normal ExternalProvisioning 62s (x2 over 62s) persistentvolume-controller waiting for a volume to be created, either by external provisioner "csi.vsphere.vmware.com" or manually created by system administrator
Normal Provisioning 62s csi.vsphere.vmware.com_4212ff9a463edc5ee2e0e01c25d6ad1e_3accebad-d290-40a9-9bfe-98a9937a5b3b External provisioner is provisioning volume for claim "e2e-test-namespace/pvc-vsand"
Normal ProvisioningSucceeded 61s csi.vsphere.vmware.com_4212ff9a463edc5ee2e0e01c25d6ad1e_3accebad-d290-40a9-9bfe-98a9937a5b3b Successfully provisioned volume pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f
Describe outputs of PV:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# kdsc pv pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f
Name: pvc-f20586c4-9144-4437-bd9c-7a5260e8f13f
Labels: <none>
Annotations: pv.kubernetes.io/provisioned-by: csi.vsphere.vmware.com
Finalizers: [kubernetes.io/pv-protection external-attacher/csi-vsphere-vmware-com]
StorageClass: vsan-d-policy
Status: Bound
Claim: e2e-test-namespace/pvc-vsand
Reclaim Policy: Delete
Access Modes: RWO
VolumeMode: Filesystem
Capacity: 100Mi
Node Affinity:
Required Terms:
Term 0: kubernetes.io/hostname in [sc2-10-184-103-28.eng.vmware.com]
Message:
Source:
Type: CSI (a Container Storage Interface (CSI) volume source)
Driver: csi.vsphere.vmware.com
FSType: ext4
VolumeHandle: e1efe3b2-5605-4f07-adb7-ca27b5c0c206
ReadOnly: false
VolumeAttributes: storage.kubernetes.io/csiProvisionerIdentity=1627072292358-8081-csi.vsphere.vmware.com
type=vSphere CNS Block Volume
Events: <none>
Pod fails to get into Running state as volume and podVM are on two different nodes:
root@42126ac4891cde8ba1b27a61eb1ffbb9 [ ~ ]# kdsc pod -n e2e-test-namespace block-pod-vsand
Name: block-pod-vsand
Namespace: e2e-test-namespace
Priority: 0
Node: sc2-10-184-104-27.eng.vmware.com/
Labels: <none>
Annotations: kubernetes.io/psp: e2e-test-privileged-psp
vmware-system-ephemeral-disk-uuid: 6000C29a-e419-7176-9718-ca510da8cc4a
vmware-system-image-references: {"test-container":"busybox-f462ad8139900f32e74c30158195dda0e3d9ec8e-v42278"}
vmware-system-vm-moid: vm-1078:5047474f-06de-42c7-9a4d-2decebf9f049
vmware-system-vm-uuid: 5012a89b-1c3b-3a8c-4085-98592f68a0b9
Status: Pending
IP:
IPs: <none>
Containers:
test-container:
Image: gcr.io/google_containers/busybox:1.24
Port: <none>
Host Port: <none>
Command:
/bin/sh
-c
echo 'hello' > /mnt/volume1/index.html && chmod o+rX /mnt /mnt/volume1/index.html && while true ; do sleep 2 ; done
Environment: <none>
Mounts:
/mnt/volume1 from test-volume (rw)
/var/run/secrets/kubernetes.io/serviceaccount from default-token-b4vdb (ro)
Conditions:
Type Status
PodScheduled True
Volumes:
test-volume:
Type: PersistentVolumeClaim (a reference to a PersistentVolumeClaim in the same namespace)
ClaimName: pvc-vsand
ReadOnly: false
default-token-b4vdb:
Type: Secret (a volume populated by a Secret)
SecretName: default-token-b4vdb
Optional: false
QoS Class: BestEffort
Node-Selectors: <none>
Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Warning FailedScheduling 23s default-scheduler running PreBind plugin "VolumeBinding": binding volumes: pv "pvc-6d8f7b0c-bd96-4c26-9e13-61b63dd36e52" node affinity doesn't match node "sc2-10-184-104-27.eng.vmware.com": no matching NodeSelectorTerms
Normal Scheduled 21s default-scheduler Successfully assigned e2e-test-namespace/block-pod-vsand to sc2-10-184-104-27.eng.vmware.com
Normal Image 22s image-controller Image busybox-f462ad8139900f32e74c30158195dda0e3d9ec8e-v42278 bound successfully
Warning FailedAttachVolume 9s (x2 over 10s) attachdetach-controller AttachVolume.Attach failed for volume "pvc-6d8f7b0c-bd96-4c26-9e13-61b63dd36e52" : rpc error: code = Internal desc = failed to attach volume with volumeID: 999eb37a-400f-470d-ac6c-7edc3d6495a0. Error: failed to attach cns volume: "999eb37a-400f-470d-ac6c-7edc3d6495a0" to node vm: "VirtualMachine:vm-1078 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 5012a89b-1c3b-3a8c-4085-98592f68a0b9, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]". fault: "(*types.LocalizedMethodFault)(0xc000b1f5c0)({\n DynamicData: (types.DynamicData) {\n },\n Fault: (types.CnsFault) {\n BaseMethodFault: (types.BaseMethodFault) <nil>,\n Reason: (string) (len=16) \"VSLM task failed\"\n },\n LocalizedMessage: (string) (len=32) \"CnsFault error: VSLM task failed\"\n})\n". opId: "5b668b88"
Warning FailedAttachVolume 8s attachdetach-controller AttachVolume.Attach failed for volume "pvc-6d8f7b0c-bd96-4c26-9e13-61b63dd36e52" : rpc error: code = Internal desc = failed to attach volume with volumeID: 999eb37a-400f-470d-ac6c-7edc3d6495a0. Error: failed to attach cns volume: "999eb37a-400f-470d-ac6c-7edc3d6495a0" to node vm: "VirtualMachine:vm-1078 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 5012a89b-1c3b-3a8c-4085-98592f68a0b9, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]". fault: "(*types.LocalizedMethodFault)(0xc000b9b2c0)({\n DynamicData: (types.DynamicData) {\n },\n Fault: (types.CnsFault) {\n BaseMethodFault: (types.BaseMethodFault) <nil>,\n Reason: (string) (len=16) \"VSLM task failed\"\n },\n LocalizedMessage: (string) (len=32) \"CnsFault error: VSLM task failed\"\n})\n". opId: "5b668b90"
Warning FailedAttachVolume 5s attachdetach-controller AttachVolume.Attach failed for volume "pvc-6d8f7b0c-bd96-4c26-9e13-61b63dd36e52" : rpc error: code = Internal desc = failed to attach volume with volumeID: 999eb37a-400f-470d-ac6c-7edc3d6495a0. Error: failed to attach cns volume: "999eb37a-400f-470d-ac6c-7edc3d6495a0" to node vm: "VirtualMachine:vm-1078 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 5012a89b-1c3b-3a8c-4085-98592f68a0b9, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]". fault: "(*types.LocalizedMethodFault)(0xc00084b180)({\n DynamicData: (types.DynamicData) {\n },\n Fault: (types.CnsFault) {\n BaseMethodFault: (types.BaseMethodFault) <nil>,\n Reason: (string) (len=16) \"VSLM task failed\"\n },\n LocalizedMessage: (string) (len=32) \"CnsFault error: VSLM task failed\"\n})\n". opId: "5b668b94"
Warning FailedAttachVolume 0s attachdetach-controller AttachVolume.Attach failed for volume "pvc-6d8f7b0c-bd96-4c26-9e13-61b63dd36e52" : rpc error: code = Internal desc = failed to attach volume with volumeID: 999eb37a-400f-470d-ac6c-7edc3d6495a0. Error: failed to attach cns volume: "999eb37a-400f-470d-ac6c-7edc3d6495a0" to node vm: "VirtualMachine:vm-1078 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 5012a89b-1c3b-3a8c-4085-98592f68a0b9, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]". fault: "(*types.LocalizedMethodFault)(0xc000499020)({\n DynamicData: (types.DynamicData) {\n },\n Fault: (types.CnsFault) {\n BaseMethodFault: (types.BaseMethodFault) <nil>,\n Reason: (string) (len=16) \"VSLM task failed\"\n },\n LocalizedMessage: (string) (len=32) \"CnsFault error: VSLM task failed\"\n})\n". opId: "5b668b97"
Logs:
{"level":"info","time":"2021-07-23T20:33:11.538620039Z","caller":"wcp/controller.go:603","msg":"ControllerPublishVolume: called with args {VolumeId:e1efe3b2-5605-4f07-adb7-ca27b5c0c206 NodeId:sc2-10-184-104-27.eng.vmware.com VolumeCapability:mount:<fs_type:\"ext4\" > access_mode:<mode:SINGLE_NODE_WRITER > Readonly:false Secrets:map[] VolumeContext:map[storage.kubernetes.io/csiProvisionerIdentity:1627072292358-8081-csi.vsphere.vmware.com type:vSphere CNS Block Volume] XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0}","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1"}
{"level":"info","time":"2021-07-23T20:33:11.539521613Z","caller":"common/util.go:346","msg":"Connecting to K8s Cloud Operator Service on port 29000","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1"}
{"level":"info","time":"2021-07-23T20:33:15.615901382Z","caller":"wcp/controller_helper.go:250","msg":"Got vmuuid: 50124d64-6a1c-eb92-c1a4-27caa7878758 annotation from K8sCloudOperator gRPC service","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1"}
{"level":"info","time":"2021-07-23T20:33:16.018027952Z","caller":"volume/manager.go:484","msg":"AttachVolume: volumeID: \"e1efe3b2-5605-4f07-adb7-ca27b5c0c206\", vm: \"VirtualMachine:vm-1076 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 50124d64-6a1c-eb92-c1a4-27caa7878758, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]\", opId: \"5b667b81\"","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1"}
{"level":"error","time":"2021-07-23T20:33:16.018512048Z","caller":"volume/manager.go:512","msg":"failed to attach cns volume: \"e1efe3b2-5605-4f07-adb7-ca27b5c0c206\" to node vm: \"VirtualMachine:vm-1076 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 50124d64-6a1c-eb92-c1a4-27caa7878758, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]\". fault: \"(*types.LocalizedMethodFault)(0xc000860fe0)({\\n DynamicData: (types.DynamicData) {\\n },\\n Fault: (types.CnsFault) {\\n BaseMethodFault: (types.BaseMethodFault) <nil>,\\n Reason: (string) (len=16) \\\"VSLM task failed\\\"\\n },\\n LocalizedMessage: (string) (len=32) \\\"CnsFault error: VSLM task failed\\\"\\n})\\n\". opId: \"5b667b81\"","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1","stacktrace":"sigs.k8s.io/vsphere-csi-driver/pkg/common/cns-lib/volume.(*defaultManager).AttachVolume.func1\n\t/build/pkg/common/cns-lib/volume/manager.go:512\nsigs.k8s.io/vsphere-csi-driver/pkg/common/cns-lib/volume.(*defaultManager).AttachVolume\n\t/build/pkg/common/cns-lib/volume/manager.go:521\nsigs.k8s.io/vsphere-csi-driver/pkg/csi/service/common.AttachVolumeUtil\n\t/build/pkg/csi/service/common/vsphereutil.go:456\nsigs.k8s.io/vsphere-csi-driver/pkg/csi/service/wcp.(*controller).ControllerPublishVolume.func1\n\t/build/pkg/csi/service/wcp/controller.go:652\nsigs.k8s.io/vsphere-csi-driver/pkg/csi/service/wcp.(*controller).ControllerPublishVolume\n\t/build/pkg/csi/service/wcp/controller.go:701\ngithub.com/container-storage-interface/spec/lib/go/csi._Controller_ControllerPublishVolume_Handler.func1\n\t/go/pkg/mod/github.com/container-storage-interface/spec@v1.4.0/lib/go/csi/csi.pb.go:5632\ngithub.com/rexray/gocsi/middleware/serialvolume.(*interceptor).controllerPublishVolume\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/serialvolume/serial_volume_locker.go:120\ngithub.com/rexray/gocsi/middleware/serialvolume.(*interceptor).handle\n\t/go/pkg/mod/git
hub.com/rexray/gocsi@v1.2.2/middleware/serialvolume/serial_volume_locker.go:86\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handleServer.func1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:178\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handle\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:218\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handleServer\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:177\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi.(*StoragePlugin).injectContext\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware.go:231\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:106\ngithub.com/container-storage-interface/spec/lib/go/csi._Controller_ControllerPublishVolume_Handler\n\t/go/pkg/mod/github.com/container-storage-interface/spec@v1.4.0/lib/go/csi/csi.pb.go:5634\ngoogle.golang.org/grpc.(*Server).processUnaryRPC\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:1024\ngoogle.golang.org/grpc.(*Server).handleStream\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:1313\ngoogle.golang.org/grpc.(*Server).serveStreams.func1.1\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:722"}
{"level":"error","time":"2021-07-23T20:33:16.023788829Z","caller":"common/vsphereutil.go:458","msg":"failed to attach disk \"e1efe3b2-5605-4f07-adb7-ca27b5c0c206\" with VM: \"VirtualMachine:vm-1076 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 50124d64-6a1c-eb92-c1a4-27caa7878758, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]\". err: failed to attach cns volume: \"e1efe3b2-5605-4f07-adb7-ca27b5c0c206\" to node vm: \"VirtualMachine:vm-1076 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 50124d64-6a1c-eb92-c1a4-27caa7878758, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]\". fault: \"(*types.LocalizedMethodFault)(0xc000860fe0)({\\n DynamicData: (types.DynamicData) {\\n },\\n Fault: (types.CnsFault) {\\n BaseMethodFault: (types.BaseMethodFault) <nil>,\\n Reason: (string) (len=16) \\\"VSLM task failed\\\"\\n },\\n LocalizedMessage: (string) (len=32) \\\"CnsFault error: VSLM task failed\\\"\\n})\\n\". 
opId: \"5b667b81\"","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1","stacktrace":"sigs.k8s.io/vsphere-csi-driver/pkg/csi/service/common.AttachVolumeUtil\n\t/build/pkg/csi/service/common/vsphereutil.go:458\nsigs.k8s.io/vsphere-csi-driver/pkg/csi/service/wcp.(*controller).ControllerPublishVolume.func1\n\t/build/pkg/csi/service/wcp/controller.go:652\nsigs.k8s.io/vsphere-csi-driver/pkg/csi/service/wcp.(*controller).ControllerPublishVolume\n\t/build/pkg/csi/service/wcp/controller.go:701\ngithub.com/container-storage-interface/spec/lib/go/csi._Controller_ControllerPublishVolume_Handler.func1\n\t/go/pkg/mod/github.com/container-storage-interface/spec@v1.4.0/lib/go/csi/csi.pb.go:5632\ngithub.com/rexray/gocsi/middleware/serialvolume.(*interceptor).controllerPublishVolume\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/serialvolume/serial_volume_locker.go:120\ngithub.com/rexray/gocsi/middleware/serialvolume.(*interceptor).handle\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/serialvolume/serial_volume_locker.go:86\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handleServer.func1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:178\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handle\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:218\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handleServer\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:177\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi.(*StoragePlugin).injectContext\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware.go:231\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t
/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:106\ngithub.com/container-storage-interface/spec/lib/go/csi._Controller_ControllerPublishVolume_Handler\n\t/go/pkg/mod/github.com/container-storage-interface/spec@v1.4.0/lib/go/csi/csi.pb.go:5634\ngoogle.golang.org/grpc.(*Server).processUnaryRPC\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:1024\ngoogle.golang.org/grpc.(*Server).handleStream\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:1313\ngoogle.golang.org/grpc.(*Server).serveStreams.func1.1\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:722"}
{"level":"info","time":"2021-07-23T20:33:16.023969129Z","caller":"wcp/controller.go:655","msg":"Volume attachment failed. Checking if it can be fake attached","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1"}
{"level":"info","time":"2021-07-23T20:33:16.024797725Z","caller":"wcp/controller.go:686","msg":"Volume e1efe3b2-5605-4f07-adb7-ca27b5c0c206 is not eligible to be fake attached","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1"}
{"level":"error","time":"2021-07-23T20:33:16.024843021Z","caller":"wcp/controller.go:688","msg":"failed to attach volume with volumeID: e1efe3b2-5605-4f07-adb7-ca27b5c0c206. Error: failed to attach cns volume: \"e1efe3b2-5605-4f07-adb7-ca27b5c0c206\" to node vm: \"VirtualMachine:vm-1076 [VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com, UUID: 50124d64-6a1c-eb92-c1a4-27caa7878758, Datacenter: Datacenter [Datacenter: Datacenter:datacenter-59, VirtualCenterHost: sc2-10-184-101-184.eng.vmware.com]]\". fault: \"(*types.LocalizedMethodFault)(0xc000860fe0)({\\n DynamicData: (types.DynamicData) {\\n },\\n Fault: (types.CnsFault) {\\n BaseMethodFault: (types.BaseMethodFault) <nil>,\\n Reason: (string) (len=16) \\\"VSLM task failed\\\"\\n },\\n LocalizedMessage: (string) (len=32) \\\"CnsFault error: VSLM task failed\\\"\\n})\\n\". opId: \"5b667b81\"","TraceId":"cd3e62c8-72a5-46e5-9cbd-03b9f353a8d1","stacktrace":"sigs.k8s.io/vsphere-csi-driver/pkg/csi/service/wcp.(*controller).ControllerPublishVolume.func1\n\t/build/pkg/csi/service/wcp/controller.go:688\nsigs.k8s.io/vsphere-csi-driver/pkg/csi/service/wcp.(*controller).ControllerPublishVolume\n\t/build/pkg/csi/service/wcp/controller.go:701\ngithub.com/container-storage-interface/spec/lib/go/csi._Controller_ControllerPublishVolume_Handler.func1\n\t/go/pkg/mod/github.com/container-storage-interface/spec@v1.4.0/lib/go/csi/csi.pb.go:5632\ngithub.com/rexray/gocsi/middleware/serialvolume.(*interceptor).controllerPublishVolume\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/serialvolume/serial_volume_locker.go:120\ngithub.com/rexray/gocsi/middleware/serialvolume.(*interceptor).handle\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/serialvolume/serial_volume_locker.go:86\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handleServer.func1\n\t/go/pkg/mod/github.com/rexr
ay/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:178\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handle\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:218\ngithub.com/rexray/gocsi/middleware/specvalidator.(*interceptor).handleServer\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware/specvalidator/spec_validator.go:177\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi.(*StoragePlugin).injectContext\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/middleware.go:231\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2.1.1\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:99\ngithub.com/rexray/gocsi/utils.ChainUnaryServer.func2\n\t/go/pkg/mod/github.com/rexray/gocsi@v1.2.2/utils/utils_middleware.go:106\ngithub.com/container-storage-interface/spec/lib/go/csi._Controller_ControllerPublishVolume_Handler\n\t/go/pkg/mod/github.com/container-storage-interface/spec@v1.4.0/lib/go/csi/csi.pb.go:5634\ngoogle.golang.org/grpc.(*Server).processUnaryRPC\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:1024\ngoogle.golang.org/grpc.(*Server).handleStream\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:1313\ngoogle.golang.org/grpc.(*Server).serveStreams.func1.1\n\t/go/pkg/mod/google.golang.org/grpc@v1.26.0/server.go:722"}
{"level":"info","time":"2021-07-23T20:33:16.082063159Z","caller":"wcp/controller.go:603","msg":"ControllerPublishVolume: called with args {VolumeId:e1efe3b2-5605-4f07-adb7-ca27b5c0c206 NodeId:sc2-10-184-104-27.eng.vmware.com VolumeCapability:mount:<fs_type:\"ext4\" > access_mode:<mode:SINGLE_NODE_WRITER > Readonly:false Secrets:map[] VolumeContext:map[storage.kubernetes.io/csiProvisionerIdentity:1627072292358-8081-csi.vsphere.vmware.com type:vSphere CNS Block Volume] XXX_NoUnkeyedLiteral:{} XXX_unrecognized:[] XXX_sizecache:0}","TraceId":"35ff088e-2b26-439d-a026-e2c5086a56de"}
{"level":"info","time":"2021-07-23T20:33:16.082199104Z","caller":"common/util.go:346","msg":"Connecting to K8s Cloud Operator Service on port 29000","TraceId":"35ff088e-2b26-439d-a026-e2c5086a56de"}