
Thomas Ashish Cherian PandaWhoCodes

@PandaWhoCodes
PandaWhoCodes / constants.py
Created February 7, 2020 06:52
API access for Tweet Assistant - Note: Make sure you register at TA before using the API
API_ENDPOINT = "https://tweetassistant.infoassistants.com"
<html>
<head>
<meta charset="UTF-8">
<title>Currency Converter</title>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
</head>
<body>
<label for="amount">Enter amount in ruppess </label>
<input type="number" id="amount">
<button onclick="calculate();">Calculate</button>
@PandaWhoCodes
PandaWhoCodes / indeed.py
Created January 28, 2020 07:07
Indeed job scraping (old code)
from bs4 import BeautifulSoup
import requests
import re
jobkey = input("Keyword: ") #Get the job title/keyword input from user as string
region = input("Location: ") #Get the geographic region from user as string
jobkey = jobkey.replace(" ", "+")
url = "http://www.indeed.co.in/jobs?q={0}&l={1}".format(jobkey, region) #create the url, including the search terms
f = requests.get(url)
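
The preview stops right after the page is fetched. A sketch of the parsing step that would follow is below; the class names and data attributes are assumptions based on Indeed's old markup and will need checking against the live HTML.

# Continuation sketch -- selectors below are assumptions from the old markup.
soup = BeautifulSoup(f.text, "html.parser")

for card in soup.find_all("div", class_="jobsearch-SerpJobCard"):
    title = card.find("a", attrs={"data-tn-element": "jobTitle"})
    company = card.find("span", class_="company")
    if title:
        print(title.get_text(strip=True),
              "-", company.get_text(strip=True) if company else "n/a")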
@PandaWhoCodes
PandaWhoCodes / ner.py
Created January 27, 2020 18:43
NER - Named Entity Extraction
import string
from nltk.stem.snowball import SnowballStemmer
import os
from nltk.chunk import conlltags2tree, tree2conlltags
import pickle
# Change here: path to the extracted GMB (Groningen Meaning Bank) corpus
corpus_root = 'gmb-2.2.0'
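
The preview ends at the corpus path, before the GMB-trained chunker appears. As a self-contained illustration of the same tree2conlltags round trip, here is a sketch that swaps in NLTK's built-in ne_chunk instead of the gist's custom chunker.

# Sketch using NLTK's built-in chunker instead of the gist's GMB-trained one.
# Requires nltk.download() of 'punkt', 'averaged_perceptron_tagger',
# 'maxent_ne_chunker' and 'words'.
import nltk
from nltk.chunk import tree2conlltags

sentence = "Thomas works at GitHub in San Francisco."
tokens = nltk.word_tokenize(sentence)
tagged = nltk.pos_tag(tokens)
tree = nltk.ne_chunk(tagged)

# tree2conlltags flattens the tree into (token, POS, IOB-tag) triples.
for token, pos, iob in tree2conlltags(tree):
    print(token, pos, iob)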
@PandaWhoCodes
PandaWhoCodes / crawler.py
Created January 27, 2020 18:40
Sitemap Generator
import urllib.request
from urllib.parse import urlsplit, urlunsplit, urljoin, urlparse
import re
class Crawler:
    def __init__(self, url, exclude=None, no_verbose=False):
        self.url = self.normalize(url)
        self.host = urlparse(self.url).netloc
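
Only the constructor survives in the preview. The sketch below shows the crawl-and-emit loop such a sitemap generator typically runs; the function names crawl_site and write_sitemap are assumptions, not necessarily the gist's own methods.

# Standalone sketch of the same idea: breadth-first crawl of one host,
# then a plain sitemap.xml dump. Names and structure are assumptions.
import urllib.request
import re
from urllib.parse import urljoin, urlparse

def crawl_site(start_url, limit=100):
    host = urlparse(start_url).netloc
    seen, queue = set(), [start_url]
    while queue and len(seen) < limit:
        url = queue.pop(0)
        if url in seen:
            continue
        seen.add(url)
        try:
            html = urllib.request.urlopen(url, timeout=10).read().decode("utf-8", "ignore")
        except Exception:
            continue
        for href in re.findall(r'href=["\'](.*?)["\']', html):
            link = urljoin(url, href).split("#")[0]
            if urlparse(link).netloc == host:
                queue.append(link)
    return sorted(seen)

def write_sitemap(urls, path="sitemap.xml"):
    with open(path, "w") as out:
        out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        out.write('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n')
        for u in urls:
            out.write("  <url><loc>%s</loc></url>\n" % u)
        out.write("</urlset>\n")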
@PandaWhoCodes
PandaWhoCodes / send.py
Created January 27, 2020 18:38
Simple Bot backend
import requests
import json
def sendData(data, name):
    if name == True:
        print("Query:\n" + data["name"])
    elif "msg" in data:
        print("Query:\n" + data["msg"])
    data = json.dumps(data)
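
The preview cuts off after the payload is serialised; presumably the function then posts it to the bot backend. A sketch of that last step is below — the endpoint URL and helper name post_to_bot are placeholders, since the gist does not show the real backend.

# Sketch of the missing send step; BOT_ENDPOINT is a placeholder, not the
# gist's real backend URL.
import requests

BOT_ENDPOINT = "https://example.com/bot/webhook"

def post_to_bot(payload_json):
    resp = requests.post(
        BOT_ENDPOINT,
        data=payload_json,
        headers={"Content-Type": "application/json"},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()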
import imaplib
from email.parser import BytesParser
from pprint import pprint
import email.header
import time
import json
import csv
# see blog article https://teklern.blogspot.com/2017/11/download-all-your-email-information.html for instructions
# just add your username and password, then configure output_filename (path) and output_type (json or csv) below
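
The preview stops at the configuration comment. The core IMAP fetch loop such a script builds on looks roughly like this; the server, credentials, and mailbox are placeholders, and the linked blog post walks through the full configuration.

# Rough shape of the fetch loop; server, credentials and mailbox are
# placeholders -- see the linked blog post for the complete script.
import imaplib
from email.parser import BytesParser

conn = imaplib.IMAP4_SSL("imap.gmail.com")
conn.login("you@example.com", "app-password")
conn.select("INBOX", readonly=True)

status, data = conn.search(None, "ALL")
for num in data[0].split()[:10]:          # first 10 messages as a demo
    status, parts = conn.fetch(num, "(RFC822.HEADER)")
    headers = BytesParser().parsebytes(parts[0][1])
    print(headers["Date"], headers["From"], headers["Subject"])

conn.logout()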
@PandaWhoCodes
PandaWhoCodes / extract_tags.py
Created January 27, 2020 18:34
Extract tech terms from a given string
import json
import string
tech_terms = []
with open("tags.json", "r") as f:
data = json.loads(f.read())
termlist = []
for items in data:
for tags in items["items"]:
tag = tags["name"]
@PandaWhoCodes
PandaWhoCodes / QA_generator.py
Created January 27, 2020 18:31
Generate QA
from nltk.corpus import wordnet as wn
from textblob import TextBlob
# from greenteapress_scraper import get_text
from pytutor_scrapper import scrape
import re
# import wikipedia
class Article:
    """Retrieves and analyzes wikipedia articles"""
import csv
from nltk import ngrams
import sys
import collections
import string
translator = str.maketrans('', '', string.punctuation)
def to_string(list):
    """
    converts a list into string
    """
    return " ".join(str(item) for item in list)
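
The preview cuts off inside to_string; the rest of such a script usually reads the CSV, strips punctuation with the translator built above, and counts n-grams. A self-contained sketch is below — the CSV layout (text in the first column), n=2, and the name count_ngrams are assumptions for illustration.

# Self-contained sketch of the counting step; column layout and n=2 are
# assumptions, not taken from the gist.
import csv
import string
import collections
from nltk import ngrams

translator = str.maketrans('', '', string.punctuation)

def count_ngrams(csv_path, n=2, column=0):
    counts = collections.Counter()
    with open(csv_path, newline="") as f:
        for row in csv.reader(f):
            text = row[column].translate(translator).lower()
            counts.update(ngrams(text.split(), n))
    return counts

# Example: print(count_ngrams("comments.csv").most_common(10))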