Emeka boris ama (Emekaborisama)
Emekaborisama / 100 Days of DS Code Curriculum
Last active Mar 21, 2019
Inspired by Siraj Raval's post on learning data science in 3 months; check it out here: https://www.youtube.com/watch?v=9rDhY1P3YLA
All content here has been moved to https://github.com/Emekaborisama/100daysofdscode
sample.py
from magniv.core import task
from datetime import datetime
import urllib.request
import json
import tweepy as tp

# Auth for the Twitter API (placeholder credentials)
auth = tp.OAuthHandler('xxxxxxx', 'xxxxxxxx')
auth.set_access_token('xxxxx-xxxxx', 'xxxxxxxx')
api = tp.API(auth, wait_on_rate_limit=False)
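The gist cuts off before the scheduled job itself. A minimal sketch of how these pieces could plug into a magniv task is below; the schedule string, description argument, and tweet format are assumptions, and get_bitcoin_data() is the helper defined in get_btc_price.py further down:

# Hypothetical magniv task wiring the Twitter client to the BTC fetcher (assumed arguments)
@task(schedule="0 * * * *", description="Tweet the current BTC price hourly")
def tweet_btc_price():
    data = get_bitcoin_data()  # defined in get_btc_price.py below
    price = data["data"]["market_data"]["price_usd"]  # assumed Messari response shape
    api.update_status(f"BTC is trading at ${price:,.2f} ({datetime.utcnow():%Y-%m-%d %H:%M} UTC)")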
get_btc_price.py
import urllib.request
import json

def get_bitcoin_data():
    """Get BTC info via the Messari API."""
    main_result = {}
    try:
        url = "https://data.messari.io/api/v1/assets/btc/metrics"
        resp = urllib.request.urlopen(url).read()
        main_result = json.loads(resp)
    except Exception as e:
        print(f"Failed to fetch BTC data: {e}")
    return main_result
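A quick usage check for the function above; the nested keys follow Messari's documented response shape (data -> market_data -> price_usd), but treat the exact path as an assumption:

if __name__ == "__main__":
    btc = get_bitcoin_data()
    # Assumed Messari response shape: price lives under data -> market_data
    print(btc.get("data", {}).get("market_data", {}).get("price_usd"))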
auth_tweepy.py
import tweepy as tp

# Auth for the Twitter API (placeholder credentials)
auth = tp.OAuthHandler('xxxxxxxxx', 'xxxxxxx')
auth.set_access_token('xxxx-xxxxx', 'xxxxxx')
api = tp.API(auth, wait_on_rate_limit=False)

try:
    api.verify_credentials()
    print("Authentication done")
except Exception as e:
    print(f"Authentication failed: {e}")
Emekaborisama / load_hg_model.py
Last active Aug 30, 2022
Load a Hugging Face model
from sentence_transformers import SentenceTransformer, util
from transformers import AutoTokenizer, AutoModel
import torch
import torch.nn.functional as F

# Mean pooling - take the attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]  # first element holds all token embeddings
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
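The gist title mentions loading the model, but that part is cut off. A minimal sketch using the AutoTokenizer/AutoModel imports above; the checkpoint name is an assumption, not from the original gist:

# Load tokenizer and model from the Hugging Face hub (model name assumed)
tokenizer = AutoTokenizer.from_pretrained("sentence-transformers/all-MiniLM-L6-v2")
model = AutoModel.from_pretrained("sentence-transformers/all-MiniLM-L6-v2")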
Emekaborisama / trans_inference.py
Last active Aug 30, 2022
Transformers inference
import time

# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'This is sample of the sentence']

start = time.time()
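The timed section is truncated in the gist. A sketch of the usual encode, pool, normalize flow, reusing tokenizer, model, and mean_pooling from load_hg_model.py above:

# Tokenize, run the model, then mean-pool and L2-normalize the embeddings
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
with torch.no_grad():
    model_output = model(**encoded_input)
embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
embeddings = F.normalize(embeddings, p=2, dim=1)
print(f"Inference took {time.time() - start:.3f}s")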
Emekaborisama / convert_transformer_to_onnx.py
Created Aug 30, 2022
Convert a transformers model to ONNX using PyTorch
torch.onnx.export(
    model,                          # the loaded transformers model
    tuple(encoded_input.values()),  # example inputs from the tokenizer
    f="torch-model.onnx",           # output file
    input_names=['input_ids', 'attention_mask', 'token_type_ids'],
    output_names=['logits'],
    dynamic_axes={'input_ids': {0: 'batch_size', 1: 'sequence'},
                  'attention_mask': {0: 'batch_size', 1: 'sequence'},
                  'token_type_ids': {0: 'batch_size', 1: 'sequence'},
                  'logits': {0: 'batch_size', 1: 'sequence'}},
)
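A quick way to sanity-check the exported file before serving it (assumes the onnx package is installed; not part of the original gist):

import onnx

# Load the exported graph and run the structural checker
onnx_model = onnx.load("torch-model.onnx")
onnx.checker.check_model(onnx_model)
print("ONNX export looks structurally valid")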
Emekaborisama / onnx_runtime_inference.py
Last active Aug 30, 2022
ONNX inference on CPU with optimization
import onnxruntime
import time

ort_session = onnxruntime.InferenceSession("torch-model.onnx", providers=["CPUExecutionProvider"])

def to_numpy(tensor):
    return tensor.detach().cpu().numpy() if tensor.requires_grad else tensor.cpu().numpy()

def run_inference(sentences):
    # Tokenize, then feed numpy arrays to the ONNX Runtime session
    tokens = tokenizer(sentences, padding=True, truncation=True, return_tensors="pt")
    ort_inputs = {name: to_numpy(tensor) for name, tensor in tokens.items()}
    return ort_session.run(None, ort_inputs)
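Timing the CPU session, mirroring the PyTorch timing in trans_inference.py above (the sentences are illustrative):

start = time.time()
outputs = run_inference(['This is an example sentence', 'This is sample of the sentence'])
print(f"ONNX Runtime inference took {time.time() - start:.3f}s")
print(outputs[0].shape)  # shape of the first output from the exported graph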