Skip to content

Instantly share code, notes, and snippets.

View ricardocarvalhods's full-sized avatar
👨‍💻
Coding..

Ricardo Carvalho ricardocarvalhods

👨‍💻
Coding..
View GitHub Profile
"""Convert My_Notebook.ipynb with nbconvert, run as a child process."""
import sys
from subprocess import check_call

# Use sys.executable so the same interpreter that runs this script runs
# nbconvert (a bare 'python' may resolve to a different install on PATH),
# and check_call so a non-zero nbconvert exit raises CalledProcessError
# instead of being silently ignored as call()'s return value was.
check_call([sys.executable, '-m', 'nbconvert', 'My_Notebook.ipynb'])
# Do not include azure-functions-worker as it may conflict with the Azure Functions platform
azure-functions
transformers
requests
-f https://download.pytorch.org/whl/torch_stable.html
torch==1.9.0+cpu
import logging
import json
import azure.functions as func
from transformers import AutoTokenizer, AutoModelForSequenceClassification
from transformers import pipeline
import torch
# Build a sentiment-analysis pipeline from a DistilBERT checkpoint
# fine-tuned on SST-2; tokenizer and model are loaded from the same hub id.
_CHECKPOINT = "distilbert-base-uncased-finetuned-sst-2-english"

tokenizer = AutoTokenizer.from_pretrained(_CHECKPOINT)
model = AutoModelForSequenceClassification.from_pretrained(_CHECKPOINT)
nlp = pipeline('sentiment-analysis', model=model, tokenizer=tokenizer)
@ricardocarvalhods
ricardocarvalhods / puwrapper.py
Created August 31, 2018 15:22 — forked from nkt1546789/puwrapper.py
A wrapper class for PU classification in Python (proposed by Elkan and Noto, 2008).
import numpy as np
from numpy import random
from sklearn import base
class PUWrapper(object):
    """Wrapper around a traditional classifier for PU learning.

    Stores the wrapped estimator and the number of cross-validation
    folds for later use by ``fit``.
    """

    def __init__(self, trad_clf, n_fold=5):
        # Keep both settings on the instance; nothing is validated here.
        self._n_fold = n_fold
        self._trad_clf = trad_clf
def fit(self,X,s):
makeglm <- function(formula, ..., family, data=NULL) {
dots <- list(...)
out<-list()
tt <- terms(formula, data=data)
if(!is.null(data)) {
mf <- model.frame(tt, data)
vn <- sapply(attr(tt, "variables")[-1], deparse)
if((yvar <- attr(tt, "response"))>0)
vn <- vn[-yvar]