import pandas as pd
import numpy as np

# Read the same CSV three ways: as-is, with 'id' as the index column,
# and with explicit column names.
df = pd.read_csv("./girls.csv")
print(df)
df = pd.read_csv("./girls.csv", index_col=['id'])
print(df)
df = pd.read_csv("./girls.csv", names=['a','b','c','d','e','f','g'])
print(df)

import pandas as pd

# A Series built from a dict uses the keys as its index.
d = {'Tamil' : 90, 'English' : 83, 'Maths' : 67, 'Science' : 83, 'Social' : 45}
s = pd.Series(d)
print(s)

# A list of such dicts becomes a DataFrame with one row per dict.
l = [{'Tamil' : 90, 'English' : 83, 'Maths' : 67, 'Science' : 83, 'Social' : 45},
     {'Tamil' : 68, 'English' : 89, 'Maths' : 75, 'Science' : 56, 'Social' : 73},
     {'Tamil' : 58, 'English' : 88, 'Maths' : 60, 'Science' : 90, 'Social' : 100}]
df = pd.DataFrame(l)
print(df)

import pandas as pd

# A DataFrame built from a list of lists gets default integer labels
# for both rows and columns.
l1 = [90,83,67,83,45]
l2 = [[90,83,67,83,45],[68,89,75,56,73],[58,88,60,90,100]]
df = pd.DataFrame(l2)
print(df)
print(df[2])   # the column labelled 2
print(df[:2])  # the first two rows

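Column vs. row selection with plain [] is easy to misread (df[2] picks a column, df[:2] slices rows); a minimal sketch of the same selections written with the explicit .iloc/.loc indexers, using the same l2 data as above:

import pandas as pd

l2 = [[90,83,67,83,45],[68,89,75,56,73],[58,88,60,90,100]]
df = pd.DataFrame(l2)
print(df.iloc[:2])    # first two rows, by position (same as df[:2])
print(df.iloc[:, 2])  # third column, by position (same as df[2] here)
print(df.loc[0, 2])   # single cell: row label 0, column label 2
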
import pandas as pd

# Empty containers.  Note: pd.Panel was removed in pandas 1.0, so the
# commented-out call below only works on older pandas versions.
print(pd.Series())
print(pd.DataFrame())
# print(pd.Panel())

l1 = [90,83,67,83,45]
s = pd.Series(l1)
print(s)

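pd.Panel was removed in pandas 1.0; the replacement pandas suggests for 3-dimensional data is a DataFrame with a MultiIndex (or the xarray package). A minimal sketch, assuming pandas >= 1.0 and made-up student/term labels:

import pandas as pd

# (student, term) pairs on the rows stand in for the old Panel's extra axis.
idx = pd.MultiIndex.from_product([['anita', 'beena'], ['term1', 'term2']],
                                 names=['student', 'term'])
df = pd.DataFrame([[90, 83], [68, 89], [58, 88], [75, 56]],
                  index=idx, columns=['Tamil', 'English'])
print(df)
print(df.xs('term1', level='term'))  # one 'slice' of the 3-D structure
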
- hosts: all
  vars:
    url: "https://downloads.apache.org/kafka/2.5.0/kafka_2.12-2.5.0.tgz"
  become: true
  tasks:
    - name: create local kafka_test directory
      shell:
        cmd: mkdir kafka_test
    - name: create /opt/kafka_test directory
      shell:
        cmd: mkdir /opt/kafka_test

- hosts: all
  become: true
  tasks:
    - name: create local directory
      file: state=directory path=kafka_test
    - name: create directory under /opt
      file: state=directory path=/opt/kafka_test
    - name: delete directory
      file: state=absent path=kafka_test

- hosts: all
  become: true
  tasks:
    - name: Update apt
      apt: update_cache=yes
    - name: Install mysql
      apt: name=mysql-client state=latest

from datetime import datetime
from airflow import DAG
from airflow.operators.bash_operator import BashOperator

# Dump the 'sample' database to a timestamped file at the top of every hour.
dag = DAG('bash_exp', description='Mysql backup every hour', schedule_interval='00 * * * *', start_date=datetime(2020, 11, 2), catchup=False)
operator = BashOperator(task_id='backup_task', bash_command='mysqldump -u root -ppassword sample > "/home/shrini/backups/"sample_"$(date +"%Y%m%d_%H%M%S").sql"', dag=dag)

from datetime import datetime
from airflow import DAG
import os
from airflow.operators.python_operator import PythonOperator
from airflow.models import Variable

# Read the MySQL credentials from Airflow Variables instead of hard-coding them.
usr = Variable.get("mysql_username")
pwd = Variable.get("mysql_password")

def backup():
    backup_cmd = 'mysqldump -u '+usr+' -p'+pwd+' sample > "/home/shrini/backups/"sample_"$(date +"%Y%m%d_%H%M%S").sql"'
    os.system(backup_cmd)

from datetime import datetime
from airflow import DAG
import os
from airflow.operators.python_operator import PythonOperator

def pushfile():
    # Copy the local backups directory to the remote host over ssh.
    push_cmd = 'rsync -arv -e "ssh -i /home/shrini/shrini-freepem.pem" /home/shrini/backups ubuntu@35.166.185.40:/home/ubuntu'
    os.system(push_cmd)

def clr():