# install krew (kubectl plugin manager)
# note: recent krew releases ship per-platform tarballs instead of a combined krew.tar.gz
(
  set -x; cd "$(mktemp -d)" &&
  OS="$(uname | tr '[:upper:]' '[:lower:]')" &&
  ARCH="$(uname -m | sed -e 's/x86_64/amd64/' -e 's/aarch64/arm64/')" &&
  KREW="krew-${OS}_${ARCH}" &&
  curl -fsSLO "https://github.com/kubernetes-sigs/krew/releases/latest/download/${KREW}.tar.gz" &&
  tar zxvf "${KREW}.tar.gz" &&
  ./"${KREW}" install krew
)
# add krew to your PATH by appending the line below to ~/.zshrc (or ~/.bashrc):
# export PATH="${KREW_ROOT:-$HOME/.krew}/bin:$PATH"
vi ~/.zshrc
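
A quick sanity check after reloading the shell; "ctx" here is just an example plugin name to search for:
kubectl krew version
kubectl krew search ctx
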
# cleanup_docker_data.sh: delete containers and images
#!/bin/bash
# remove all stopped containers first, so their images are no longer referenced
docker container prune -f
# delete all docker images
docker rmi $(docker images -a -q)
# clean up everything else (dangling images, unused networks, build cache)
docker system prune -f
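
The same cleanup fits in a single command; note that --volumes also deletes named volumes, so this sketch is more destructive than the script above:
# remove all unused images, stopped containers, networks and volumes
docker system prune -a --volumes -f
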
#!/bin/bash
# install the Microsoft ODBC driver 17 and pyodbc on Ubuntu 16.04 (run as root)
curl -s https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
curl -s https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/mssql-release.list
apt-get update
ACCEPT_EULA=Y apt-get install -y msodbcsql17
apt-get install -y unixodbc-dev
apt-get install -y python3-pip python3-dev
pip3 install --upgrade pyodbc
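
A quick check that the driver and the Python module are wired together; "ODBC Driver 17 for SQL Server" should appear in the printed list:
python3 -c "import pyodbc; print(pyodbc.drivers())"
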
# install Azure CLI on RHEL/CentOS
sudo rpm --import https://packages.microsoft.com/keys/microsoft.asc
sudo sh -c 'echo -e "[azure-cli]\nname=Azure CLI\nbaseurl=https://packages.microsoft.com/yumrepos/azure-cli\nenabled=1\ngpgcheck=1\ngpgkey=https://packages.microsoft.com/keys/microsoft.asc" > /etc/yum.repos.d/azure-cli.repo'
sudo yum install -y azure-cli
az login
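
After az login, two read-only calls confirm the CLI is pointed at the expected subscription:
az account show
az group list --output table
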
# install kubectl (latest stable release)
curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl
chmod +x ./kubectl
sudo mv ./kubectl /usr/local/bin/kubectl
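
A client-side check that the binary is on PATH (no cluster connection needed):
kubectl version --client
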
# install helm 3, either via the official script:
# curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash
# or from a release tarball (-O is needed, otherwise curl dumps the tarball to stdout):
curl -O https://get.helm.sh/helm-v3.1.0-linux-amd64.tar.gz
tar -zxvf helm-v3.1.0-linux-amd64.tar.gz
sudo mv linux-amd64/helm /usr/local/bin/helm
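
To confirm the install, print the version and try a chart search; the bitnami repository is just an example:
helm version
helm repo add bitnami https://charts.bitnami.com/bitnami
helm search repo bitnami
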
1. Import orders from table retail_db.orders, order id 1 to 30,000, into the HDFS folder /user/cloudera/problem1 using Sqoop.
Fields should be tab-delimited and the data stored in sequence file format, compressed with the gzip codec. The result should have 30,000 records.
Sol:- sqoop import --connect jdbc:mysql://quickstart.cloudera/retail_db --username retail_dba --password cloudera --table orders --where 'order_id <= 30000' --target-dir /user/cloudera/problem1 --compress --compression-codec org.apache.hadoop.io.compress.GzipCodec --as-sequencefile --fields-terminated-by '\t'
(note the quotes around '\t'; unquoted, the shell strips the backslash and Sqoop sees a literal "t" as the delimiter)
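
One way to verify the import: list the output files, then compare against a source-side count (sqoop eval runs the query directly on MySQL):
hdfs dfs -ls /user/cloudera/problem1
sqoop eval --connect jdbc:mysql://quickstart.cloudera/retail_db --username retail_dba --password cloudera --query "select count(*) from orders where order_id <= 30000"
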
2. Export data from the HDFS folder /user/cloudera/problem2 to table retail_db.orders1. The data in HDFS is gzip-compressed and
fields are delimited by ','.
setup prob 2 -
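
A minimal sketch of the export itself, assuming retail_db.orders1 already exists with columns matching the files; the ',' delimiter comes from the problem statement, and whether the gzip files need decompressing first depends on the Sqoop version:
sqoop export --connect jdbc:mysql://quickstart.cloudera/retail_db --username retail_dba --password cloudera --table orders1 --export-dir /user/cloudera/problem2 --input-fields-terminated-by ','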