Skip to content

Instantly share code, notes, and snippets.

View prakhar21's full-sized avatar
🙇‍♂️
Working

Prakhar Mishra prakhar21

🙇‍♂️
Working
View GitHub Profile
import os
import time
import random
import re
from slackclient import SlackClient
from keras.applications.vgg16 import VGG16
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.applications.vgg16 import preprocess_input
from keras.applications.vgg16 import decode_predictions
# Bar-chart setup comparing readability scores across books.
# NOTE(review): assumes `readability_scores` (a sequence of numbers) and
# `pylab` are defined earlier in the script — not visible in this fragment.
y = readability_scores
N = len(y)
x = list(range(1, N + 1))  # 1-based bar positions, one per book
width = 1 / 1.5            # bar width for the plot
mini = min(y)
maxi = max(y)
pylab.title("Readability Comparison of text")
pylab.xlabel("Book")
@prakhar21
prakhar21 / corpus_read.py
Created July 4, 2021 16:41
reading corpus
import sys
import time
import glob
import codecs
import matplotlib.pyplot as plt
import requests
# Collect every Harry Potter corpus file, in lexicographic order so that
# book 1 comes before book 2, etc.
books = glob.glob("data/harrypotter/*.txt")
books.sort()
from happytransformer import GENSettings
# Top-k sampling: at each decoding step, sample only from the 50
# most-probable next tokens; cap generations at 10-30 tokens.
top_k_sampling_settings = GENSettings(
    do_sample=True,
    top_k=50,
    max_length=30,
    min_length=10,
)

# NOTE(review): assumes `gpt_neo` (a HappyGeneration instance) is defined
# earlier — not visible in this fragment.
output_top_k_sampling = gpt_neo.generate_text("Iphone ", args=top_k_sampling_settings)
print(output_top_k_sampling.text)
# Example output: iphones are very good for keeping track of calls
from happytransformer import HappyGeneration, GENTrainArgs
# Load the 125M-parameter GPT-Neo checkpoint for fine-tuning.
gpt_neo = HappyGeneration("GPT-Neo", "EleutherAI/gpt-neo-125M")

# Single epoch, small batch, conservative LR — a light fine-tuning pass.
train_args = GENTrainArgs(
    num_train_epochs=1,
    learning_rate=2e-05,
    batch_size=2,
)
gpt_neo.train("train.txt", args=train_args)
@prakhar21
prakhar21 / gptneo.py
Last active May 9, 2021 06:25
GPT-neo inference
from happytransformer import HappyGeneration
# Instantiate the 125M-parameter GPT-Neo model for text generation and
# print the wrapper object to confirm it loaded.
gpt_neo = HappyGeneration(
    model_type="GPT-NEO",
    model_name="EleutherAI/gpt-neo-125M",
)
print(gpt_neo)
@prakhar21
prakhar21 / q_a.py
Created May 8, 2021 11:32
q_a_transformer_interpret.py
from transformers import AutoModelForQuestionAnswering, AutoTokenizer
from transformers_interpret import QuestionAnsweringExplainer
# BERT-large fine-tuned on SQuAD — the same checkpoint backs both the
# tokenizer and the QA model, so name it once.
squad_checkpoint = "bert-large-uncased-whole-word-masking-finetuned-squad"
tokenizer = AutoTokenizer.from_pretrained(squad_checkpoint)
model = AutoModelForQuestionAnswering.from_pretrained(squad_checkpoint)

# Explainer that attributes predicted answer spans back to input tokens.
qa_explainer = QuestionAnsweringExplainer(model, tokenizer)
@prakhar21
prakhar21 / attribution.py
Created May 8, 2021 11:01
transformer-interpret_attribution.py
from transformers_interpret import SequenceClassificationExplainer
sample_text = """A very classy nice restaurant. A warm welcoming, followed by an excellent service, with a lot of attention to details on order to please you."""
multiclass_explainer = SequenceClassificationExplainer(model=model, tokenizer=tokenizer)
word_attributions = multiclass_explainer(text=sample_text)
print (word_attributions)
@prakhar21
prakhar21 / transformer_interpet_model_loading.py
Created May 8, 2021 10:56
transformer-interpret_model_loading
from transformers import AutoModelForSequenceClassification, AutoTokenizer
# Industry-classification checkpoint from the Hugging Face hub; both the
# tokenizer and the model come from the same repo, so name it once.
checkpoint = "sampathkethineedi/industry-classification"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
print(tokenizer, model)
@prakhar21
prakhar21 / subtitle_translator_streamlit_easynmt.py
Last active February 6, 2021 08:44
subtitle translator streamlit easynmt
"""
@author: Prakhar Mishra
"""
import streamlit as st
from easynmt import EasyNMT
# Opus-MT multilingual translation model (downloads weights on first use).
model = EasyNMT('opus-mt')

# (source, target) language pairs offered by the translator UI.
LANGS = [
    ('en', 'es'),
    ('en', 'fr'),
    ('en', 'de'),
    ('en', 'ar'),
    ('en', 'tr'),
    ('en', 'ko'),
]