charith prasanna CHARITH1995
response = s3client.put_object(
    Body=gz_body.getvalue(),
    Bucket=bucket_name,
    Key=s3_path,
    ContentType='text/plain',     # content type of the original, uncompressed data
    ContentEncoding='gzip',       # required, or browsers will fail to decode the body
    ACL='public-read'
)
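Note that the preview above begins after gz_body has already been created. A minimal self-contained sketch of the usual pattern, with the bucket name, key, and payload as placeholders:

import gzip
from io import BytesIO

import boto3

# Sketch: gzip a text payload in memory, then upload it with the headers shown above.
# The bucket, key, and payload below are placeholders.
s3client = boto3.client("s3")
bucket_name, s3_path = "my-bucket", "path/to/object.txt"

gz_body = BytesIO()
with gzip.GzipFile(fileobj=gz_body, mode="wb") as gz:
    gz.write("hello, world\n".encode("utf-8"))

response = s3client.put_object(
    Body=gz_body.getvalue(),
    Bucket=bucket_name,
    Key=s3_path,
    ContentType="text/plain",
    ContentEncoding="gzip",
    ACL="public-read",
)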
@CHARITH1995
CHARITH1995 / aggreagation
Last active July 27, 2022 07:24
login.js file
{
  "query": {
    "bool": {
      "filter": [
        {
          "range": {
            "@timestamp": {
              "from": "{{period_end}}||-1m",
              "to": "{{period_end}}",
              "include_lower": true,
@CHARITH1995
CHARITH1995 / must not
Last active July 27, 2022 07:30
index.js file
{
  "query": {
    "bool": {
      "filter": [
        {
          "range": {
            "@timestamp": {
              "from": "{{period_end}}||-1m",
              "to": "{{period_end}}",
              "include_lower": true,
@CHARITH1995
CHARITH1995 / config.json
Created July 20, 2020 18:29
config file
{
  "api": {
    "invokeUrl": "https://5bltcq602h.execute-api.us-west-2.amazonaws.com/prod"
  },
  "cognito": {
    "REGION": "us-east-1",
    "USER_POOL_ID": "us-east-1_************",
    "APP_CLIENT_ID": "34uglnub******md0d1d"
  }
}
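A quick way to sanity-check this file is to load it and read both sections; a minimal sketch, assuming config.json sits in the working directory:

import json

# Sketch: load config.json and pull out the API endpoint and Cognito settings.
with open("config.json", encoding="utf-8") as f:
    config = json.load(f)

invoke_url = config["api"]["invokeUrl"]
region = config["cognito"]["REGION"]
user_pool_id = config["cognito"]["USER_POOL_ID"]
app_client_id = config["cognito"]["APP_CLIENT_ID"]
print(invoke_url, region)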
@CHARITH1995
CHARITH1995 / webscrapping-full.py
Last active July 17, 2020 08:32
web scraping: full code
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# %matplotlib inline   (Jupyter notebook magic; only needed when running in a notebook)
from urllib.request import urlopen
from bs4 import BeautifulSoup
import time
from selenium import webdriver
import MySQLdb
@CHARITH1995
CHARITH1995 / save-mySQL.py
Created July 17, 2020 08:26
save to a MySQL database
sql = "INSERT INTO `newsarticles` (`source`, `sid`, `time`, `title`, `body`) VALUES (%s, %s, %s, %s, %s);" # those names depends on your database.
try:
cur.execute(sql, ('adaderana',0, date, title, contents))
db.commit()
except (MySQLdb.Error, MySQLdb.Warning) as e:
print ("SQL Error")
raise e
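The INSERT above assumes an open connection db, a cursor cur, and an existing newsarticles table. A sketch of that setup; the credentials and column types are placeholders, not the gist's actual values:

import MySQLdb

# Sketch: connection, cursor, and a table matching the INSERT above.
# Credentials and column types are placeholder assumptions.
db = MySQLdb.connect(host="localhost", user="root", passwd="secret", db="news", charset="utf8")
cur = db.cursor()
cur.execute("""
    CREATE TABLE IF NOT EXISTS `newsarticles` (
        `source` VARCHAR(64),
        `sid`    INT,
        `time`   VARCHAR(64),
        `title`  TEXT,
        `body`   TEXT
    )
""")
db.commit()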
@CHARITH1995
CHARITH1995 / save-csv.py
Created July 17, 2020 08:23
save data to a .csv file
import csv

# 'w' mode creates news.csv automatically; newline='' avoids blank lines on Windows
with open('news.csv', 'w', encoding="utf-8", newline='') as file:
    writer = csv.writer(file)
    writer.writerow(course)  # 'course' holds the row of scraped values to write
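For more than one article, the same pattern extends to a header row plus writerows; a sketch with the column names borrowed from the newsarticles table and placeholder data standing in for scraped values:

import csv

# Sketch: write a header row plus one row per scraped article.
# The rows below are placeholders standing in for scraped values.
rows = [
    ("adaderana", 0, "2020-07-17 08:00", "Example title", "Example body"),
]
with open("news.csv", "w", encoding="utf-8", newline="") as file:
    writer = csv.writer(file)
    writer.writerow(["source", "sid", "time", "title", "body"])
    writer.writerows(rows)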
@CHARITH1995
CHARITH1995 / save-txt.py
Created July 17, 2020 08:21
save to a .txt file
with open("news.txt", "w", encoding="utf-8") as file:
file.write(date)
file.write(title.decode(encoding='UTF-8'))
file.write(contents.decode(encoding='UTF-8'))
file.close()
@CHARITH1995
CHARITH1995 / get-data.py
Last active July 17, 2020 08:17
get data from <h2>, <a>, etc. tags
for link in all_div:
    news_container = link.find_all("div", {"class": "story-text"})
    for news in news_container:
        h2_tags = news.find_all("h2")
        for url in h2_tags:
            a_tags = url.find_all('a')
            for end_point in a_tags:
                # print(end_point.get("href"))
                url_ind = "http://sinhala.adaderana.lk/" + end_point.get("href")
                driver_ind = webdriver.Firefox(executable_path=r'D:\apps\anaconda\geckodriver.exe')
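The preview stops right after a new Firefox instance is started for every article link. A sketch of the usual continuation that reuses a single driver instead; the geckodriver path is taken from the snippet above, while the article URL list and page selectors are assumptions:

from bs4 import BeautifulSoup
from selenium import webdriver

# Sketch: fetch each collected article URL with one shared driver and parse it.
# The "news-content" class and the <h1> headline tag are assumptions.
driver = webdriver.Firefox(executable_path=r'D:\apps\anaconda\geckodriver.exe')
article_urls = ["http://sinhala.adaderana.lk/"]   # placeholder; use the url_ind values collected above

for url_ind in article_urls:
    driver.get(url_ind)
    soup = BeautifulSoup(driver.page_source, "html.parser")
    title = soup.find("h1")
    contents = soup.find("div", {"class": "news-content"})
    if title and contents:
        print(title.get_text(strip=True))

driver.quit()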
@CHARITH1995
CHARITH1995 / web-scrapping.py
Created July 17, 2020 08:12
web scraping: library imports
from bs4 import BeautifulSoup
import time
from selenium import webdriver
import MySQLdb
from datetime import datetime
import csv
url = "http://sinhala.adaderana.lk/sinhala-hot-news.php" #this is website url
db = MySQLdb.connect(host="localhost",