Skip to content

Instantly share code, notes, and snippets.

Avatar
🙇‍♂️
Working

Prakhar Mishra prakhar21

🙇‍♂️
Working
View GitHub Profile
View plotting.py
# Set up a bar chart comparing readability scores across books.
# NOTE(review): `readability_scores` and `pylab` are defined earlier in the
# original script — not visible in this excerpt.
y = readability_scores
N = len(y)
# 1-based x positions, one tick per book/score.
x = [i+1 for i in range(N)]
width = 1/1.5  # bar width — presumably passed to pylab.bar() further down
mini = min(readability_scores)  # extremes of the scores — likely used for
maxi = max(readability_scores)  # axis limits later in the script; TODO confirm
pylab.title("Readability Comparison of text")
pylab.xlabel("Book")
View corpus_read.py
import sys
import time
import glob
import codecs
import matplotlib.pyplot as plt
import requests
# Collect the Harry Potter corpus files; sorting by filename keeps the
# book order deterministic across platforms.
books = glob.glob("data/harrypotter/*.txt")
books.sort()
View train_gpt-neo.py
# Fine-tune GPT-Neo (125M) for one epoch on a plain-text training file.
from happytransformer import HappyGeneration, GENTrainArgs

# model_type "GPT-NEO" matches the identifier used in the sibling gptneo.py
# snippet and in the happytransformer documentation (was "GPT-Neo").
gpt_neo = HappyGeneration("GPT-NEO", "EleutherAI/gpt-neo-125M")

# Light-touch hyperparameters: a single epoch, lr 2e-5, batch size 2.
train_args = GENTrainArgs(num_train_epochs=1, learning_rate=2e-05, batch_size=2)
gpt_neo.train("train.txt", args=train_args)
View gist:69f7de651a322283fffce38f45691c3d
# Generate text with top-k sampling: sample only among the 50 most likely
# next tokens, producing between 10 and 30 tokens.
from happytransformer import GENSettings
top_k_sampling_settings = GENSettings(do_sample=True, top_k=50, max_length=30, min_length=10)
# NOTE(review): `gpt_neo` is created in the sibling gptneo.py snippet.
output_top_k_sampling = gpt_neo.generate_text("Iphone ", args=top_k_sampling_settings)
print (output_top_k_sampling.text)
# iphones are very good for keeping track of calls
@prakhar21
prakhar21 / gptneo.py
Last active May 9, 2021
GPT-neo inference
View gptneo.py
"""Load the 125M-parameter GPT-Neo model via happytransformer and inspect it."""
from happytransformer import HappyGeneration

# Downloads the checkpoint from the Hugging Face Hub on first use.
gpt_neo = HappyGeneration(
    model_type="GPT-NEO",
    model_name="EleutherAI/gpt-neo-125M",
)
print(gpt_neo)
@prakhar21
prakhar21 / q_a.py
Created May 8, 2021
q_a_transformer_interpret.py
View q_a.py
"""Build a transformers-interpret explainer for extractive question answering."""
from transformers import AutoModelForQuestionAnswering, AutoTokenizer
from transformers_interpret import QuestionAnsweringExplainer

# BERT-large fine-tuned on SQuAD; tokenizer and model must come from the
# same checkpoint for the attributions to line up with the tokens.
_checkpoint = "bert-large-uncased-whole-word-masking-finetuned-squad"
tokenizer = AutoTokenizer.from_pretrained(_checkpoint)
model = AutoModelForQuestionAnswering.from_pretrained(_checkpoint)
qa_explainer = QuestionAnsweringExplainer(model, tokenizer)
@prakhar21
prakhar21 / attribution.py
Created May 8, 2021
transformer-interpret_attribution.py
View attribution.py
# Word-level attributions for a sequence-classification model.
# NOTE(review): `model` and `tokenizer` are loaded in a sibling snippet.
from transformers_interpret import SequenceClassificationExplainer

sample_text = """A very classy nice restaurant. A warm welcoming, followed by an excellent service, with a lot of attention to details on order to please you."""

explainer = SequenceClassificationExplainer(model=model, tokenizer=tokenizer)
word_attributions = explainer(text=sample_text)
print(word_attributions)
@prakhar21
prakhar21 / transformer_interpret_model_loading.py
Created May 8, 2021
transformer-interpret_model_loading
View transformer_interpret_model_loading.py
"""Load an industry-classification model and its tokenizer from the Hub."""
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Single source of truth for the checkpoint id, used for both objects.
_model_id = "sampathkethineedi/industry-classification"
tokenizer = AutoTokenizer.from_pretrained(_model_id)
model = AutoModelForSequenceClassification.from_pretrained(_model_id)
print(tokenizer, model)
@prakhar21
prakhar21 / subtitle_translator_streamlit_easynmt.py
Last active Feb 6, 2021
subtitle translator streamlit easynmt
View subtitle_translator_streamlit_easynmt.py
"""
@author: Prakhar Mishra
"""
import streamlit as st
from easynmt import EasyNMT
model = EasyNMT('opus-mt')
LANGS = [('en', 'es'), ('en', 'fr'), ('en', 'de'), ('en', 'ar'), ('en', 'tr'), ('en', 'ko')]
@prakhar21
prakhar21 / trie_node.py
Created Feb 4, 2021
Trie Node in Python
View trie_node.py
class TrieNode:
    """A single node of a trie (prefix tree)."""

    def __init__(self):
        # Maps a character to the child TrieNode reached via that character.
        self.child = {}
        # True when an inserted word ends exactly at this node.
        self.last = False