Skip to content

Instantly share code, notes, and snippets.

@jcnars
Last active November 18, 2021 23:55
Show Gist options
  • Save jcnars/8aecb365c232d4c5332aad184a8df452 to your computer and use it in GitHub Desktop.
Save jcnars/8aecb365c232d4c5332aad184a8df452 to your computer and use it in GitHub Desktop.
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ kubectl create configmap rac-inventory-syd2 --from-file="/usr/local/google/home/jcnarasimhan/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle/inventory_rac_syd2" -n test-pods
configmap/rac-inventory-syd2 created
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ kubectl create configmap rac-asm-syd2 --from-file="/usr/local/google/home/jcnarasimhan/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle/asm_config_rac_syd2.json" -n test-pods
configmap/rac-asm-syd2 created
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ kubectl create configmap rac-datamounts-syd2 --from-file="/usr/local/google/home/jcnarasimhan/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle/data_mounts_config_rac_syd2.json" -n test-pods
configmap/rac-datamounts-syd2 created
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ kubectl create configmap rac-clusterconfig-syd2 --from-file="/usr/local/google/home/jcnarasimhan/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle/cluster_config_rac_syd2.json" -n test-pods
configmap/rac-clusterconfig-syd2 created
Where the file contents are as follows:
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ cat inventory_rac_syd2
[dbasm]
at-00010-svr002 ansible_ssh_host=172.16.30.2 vip_name=at-00010-svr002-vip vip_ip=172.16.30.12 ansible_ssh_user=ansible9 ansible_ssh_private_key_file=/etc/files_needed_for_tk/id_rsa_bms_tk_key ansible_ssh_extra_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o IdentityAgent=no'
[dbasm:vars]
scan_name=syd2scan
scan_port=1521
cluster_name=syd2-onenode-cluster
cluster_domain=company.brisbane.com
public_net=bond0.111
private_net=bond1.112
scan_ip1=172.16.30.20
scan_ip2=172.16.30.21
scan_ip3=172.16.30.22
dg_name=DATA
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ cat asm_config_rac_syd2.json
[{
"diskgroup": "DATA",
"disks": [
{ "name": "DATA_7539397549", "blk_device": "/dev/mapper/3600a098038314344382b4f7539397549" },
{ "name": "DATA_753939754A", "blk_device": "/dev/mapper/3600a098038314344382b4f753939754a" },
{ "name": "DATA_753939754B", "blk_device": "/dev/mapper/3600a098038314344382b4f753939754b" },
{ "name": "DATA_753939754C", "blk_device": "/dev/mapper/3600a098038314344382b4f753939754c" },
{ "name": "DATA_753939754D", "blk_device": "/dev/mapper/3600a098038314344382b4f753939754d" },
{ "name": "DATA_753939754E", "blk_device": "/dev/mapper/3600a098038314344382b4f753939754e" },
{ "name": "DATA_753939754F", "blk_device": "/dev/mapper/3600a098038314344382b4f753939754f" },
{ "name": "DATA_7539397550", "blk_device": "/dev/mapper/3600a098038314344382b4f7539397550" },
{ "name": "DATA_75392D3846", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d3846" },
{ "name": "DATA_75392D3847", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d3847" },
{ "name": "DATA_75392D3848", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d3848" },
{ "name": "DATA_75392D3849", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d3849" },
{ "name": "DATA_75392D384A", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d384a" },
{ "name": "DATA_75392D384B", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d384b" },
{ "name": "DATA_75392D384C", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d384c" },
{ "name": "DATA_75392D384D", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d384d" }
]},
{
"diskgroup": "RECO",
"disks": [
{ "name": "RECO_7539397551", "blk_device": "/dev/mapper/3600a098038314344382b4f7539397551" },
{ "name": "RECO_7539397552", "blk_device": "/dev/mapper/3600a098038314344382b4f7539397552" },
{ "name": "RECO_7539397553", "blk_device": "/dev/mapper/3600a098038314344382b4f7539397553" },
{ "name": "RECO_7539397554", "blk_device": "/dev/mapper/3600a098038314344382b4f7539397554" },
{ "name": "RECO_75392D384E", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d384e" },
{ "name": "RECO_75392D384F", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d384f" },
{ "name": "RECO_75392D3850", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d3850" },
{ "name": "RECO_75392D3851", "blk_device": "/dev/mapper/3600a098038314344372b4f75392d3851" }
]}
]
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ cat data_mounts_config_rac_syd2.json
[
{
"purpose": "software",
"blk_device": "/dev/mapper/sw-u01",
"name": "u01",
"fstype":"xfs",
"mount_point":"/u01",
"mount_opts":"defaults"
},
{
"purpose": "diag",
"blk_device": "/dev/mapper/sw-u02",
"name": "u02",
"fstype":"xfs",
"mount_point":"/u02",
"mount_opts":"defaults"
}
]
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ cat cluster_config_rac_syd2.json
[
{
"scan_name": "syd2scan",
"scan_port": "1521",
"cluster_name": "syd2-onenode-cluster",
"cluster_domain": "company.brisbane.com",
"public_net": "bond0.111",
"private_net": "bond1.112",
"scan_ip1": "172.16.30.20",
"scan_ip2": "172.16.30.21",
"scan_ip3": "172.16.30.22",
"dg_name": "DATA",
"nodes": [
{ "node_name": "at-00010-svr002",
"host_ip": "172.16.30.2",
"vip_name": "at-00010-svr002-vip",
"vip_ip": "172.16.30.12"
}
]
}
]
jcnarasimhan@jon2:~/DriveFS/My Drive/bmaas/test_automation/oss_prow_onboarding/running_cleanup_install_oracle$ grep -v \# ~/mydrive/bmaas/test_automation/oss_prow_onboarding/test_locally_rac.yaml
---
# Pod that runs the BMS toolkit cleanup (cleanup-oracle.sh) against the syd2
# RAC host. The SSH key and all generated config files are projected into
# /etc/files_needed_for_tk from the Secret/ConfigMaps listed under volumes.
# NOTE(review): indentation below restored from a flattened paste — structure
# follows the standard Pod schema; confirm against the original file.
apiVersion: v1
kind: Pod
metadata:
  name: test-pods-namespace-lo-podname
  namespace: test-pods
spec:
  # Host networking so the container can reach the 172.16.30.0/24 BMS nodes.
  hostNetwork: true
  containers:
    - name: ansibleinstance
      image: quay.io/ansible/ansible-runner:stable-2.9-latest
      command:
        - /bin/sh
        - "-c"
      args:
        # One shell program handed to `sh -c`. A folded block scalar (>-)
        # joins these lines with spaces into a single string, which is what
        # the original multi-line plain scalar did implicitly.
        - >-
          echo ; echo "starting - listing /root before adding host to known_hosts";
          ls -al "/root";
          mkdir /root/.ssh;
          chmod 0700 /root/.ssh;
          ssh-keyscan -tecdsa 172.16.30.1 > /root/.ssh/known_hosts;
          echo; echo "listing /root/.ssh after adding host to known_hosts";
          ls -al "/root/.ssh";
          cd /root;git clone -b jcnars-patch-3 --single-branch https://github.com/google/bms-toolkit.git;
          ls -l /root;
          pip install jmespath;
          cp /etc/files_needed_for_tk/google-cloud-sdk.repo /etc/yum.repos.d/google-cloud-sdk.repo;
          yum install google-cloud-sdk -y;
          cd /root/bms-toolkit; ./cleanup-oracle.sh --ora-version 19 --inventory-file /etc/files_needed_for_tk/rac-inv --yes-i-am-sure --ora-disk-mgmt udev --ora-swlib-path /u01/oracle_install --ora-asm-disks /etc/files_needed_for_tk/rac-asm.json --ora-data-mounts /etc/files_needed_for_tk/rac-datamounts.json > /root/bms-toolkit/sydney2.brute_cleanup 2>&1 ;
          sleep 60000;
          echo; echo done;
      resources:
        requests:
          memory: "2.0Gi"
          cpu: "3.0"
      volumeMounts:
        - name: all-in-one
          mountPath: /etc/files_needed_for_tk
  volumes:
    - name: all-in-one
      projected:
        sources:
          - secret:
              name: id-rsa-bms-ansible9-syd1
              items:
                - key: id_rsa_bms_tk_key
                  path: id_rsa_bms_tk_key
                  # 0400 is octal under kubectl's YAML 1.1 parser (decimal
                  # 256): owner read-only, as ssh requires for private keys.
                  mode: 0400
          - configMap:
              name: rac-asm-syd2
              items:
                - key: asm_config_rac_syd2.json
                  path: rac-asm.json
          - configMap:
              name: rac-datamounts-syd2
              items:
                - key: data_mounts_config_rac_syd2.json
                  path: rac-datamounts.json
          - configMap:
              name: rac-inventory-syd2
              items:
                - key: inventory_rac_syd2
                  path: rac-inv
          - configMap:
              name: rac-clusterconfig-syd2
              items:
                - key: cluster_config_rac_syd2.json
                  path: rac-clusterconfig.json
          - configMap:
              name: google-cloud-sdk
              items:
                - key: google-cloud-sdk.repo
                  path: google-cloud-sdk.repo
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment