Joey (joeyism)
joeyism / deploy_aws.sh
Created November 7, 2022 15:50
Deploying Flask to AWS
#!/bin/bash
# Prompt for deployment settings, falling back to sensible defaults.

# Default server name: this machine's public IP, resolved via OpenDNS.
CURRENT_ID=$(dig +short myip.opendns.com @resolver1.opendns.com)
read -p "Server Name [$CURRENT_ID]: " SERVER_NAME
SERVER_NAME=${SERVER_NAME:-${CURRENT_ID}}

# Default working directory: wherever the script is run from.
CURRENT_WORK_DIR=$(pwd)
read -e -p "Working Directory [$CURRENT_WORK_DIR]: " WORKING_DIR
WORKING_DIR=${WORKING_DIR:-${CURRENT_WORK_DIR}}

# Path to the gunicorn binary that will serve the Flask app.
read -e -p "GUnicorn Executable (i.e. /usr/bin/gunicorn): " GUNICORN_EXEC
from transformers import GPT2Tokenizer, GPT2LMHeadModel
import torch
import numpy as np

# Directory containing the fine-tuned GPT-2 checkpoint and tokenizer files.
OUTPUT_DIR = "./output"

# Run on GPU when one is available.
device = 'cpu'
if torch.cuda.is_available():
    device = 'cuda'

tokenizer = GPT2Tokenizer.from_pretrained(OUTPUT_DIR)
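The preview cuts off after the tokenizer is loaded. A minimal sketch of how the rest of the script might load the checkpoint and sample a continuation, assuming OUTPUT_DIR holds a model saved with save_pretrained; the prompt and generation settings below are illustrative, not from the gist:

model = GPT2LMHeadModel.from_pretrained(OUTPUT_DIR).to(device)
model.eval()

prompt = "The Toronto Raptors"  # illustrative prompt, not from the gist
input_ids = tokenizer.encode(prompt, return_tensors="pt").to(device)

with torch.no_grad():
    # Sample up to 50 new tokens from the fine-tuned model.
    output_ids = model.generate(
        input_ids,
        max_length=input_ids.shape[1] + 50,
        do_sample=True,
        top_k=40,
        pad_token_id=tokenizer.eos_token_id,
    )

print(tokenizer.decode(output_ids[0], skip_special_tokens=True))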
import torch.multiprocessing as mp

# Shared counter tracking how many items are currently queued across workers.
QUEUE_SIZE = mp.Value('i', 0)

def _decrease_queue():
    with QUEUE_SIZE.get_lock():
        QUEUE_SIZE.value -= 1

def _increase_queue():
    with QUEUE_SIZE.get_lock():
        QUEUE_SIZE.value += 1
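A rough sketch of how these shared counters might be used around work handed to child processes; the worker body and process count are illustrative, and it assumes the default fork start method on Linux so the Value created above is inherited by (and shared with) the children:

def _handle_request(payload):
    # Illustrative worker: track the request while it is being processed.
    _increase_queue()
    try:
        print("processing", payload, "queue size:", QUEUE_SIZE.value)
    finally:
        _decrease_queue()

if __name__ == "__main__":
    procs = [mp.Process(target=_handle_request, args=(f"request {i}",)) for i in range(3)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print("final queue size:", QUEUE_SIZE.value)  # back to 0 once all workers finish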
joeyism / main.py
Last active October 9, 2019 02:07
import string
import copy

import gpt2

predictor = gpt2.Gpt2Predictor()

def predict_until_punctuation(input_str):
    if not input_str:
        raise Exception('input string required')
    output_str = copy.deepcopy(input_str)
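    # Hypothetical continuation (the gist preview cuts off here): keep asking
    # the predictor for its most likely next word and appending it until a
    # word containing punctuation shows up. The "words" key and taking the
    # top candidate are assumptions about Gpt2Predictor's output, not code
    # from the gist.
    while True:
        result = predictor.predict_json({"previous": output_str})
        next_word = result["words"][0]
        output_str += next_word
        if any(char in string.punctuation for char in next_word):
            return output_str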
{"logits": [-166.17694091796875,
-166.25209045410156,
-166.281494140625,
-166.30458068847656,
-166.33673095703125,
-166.34226989746094,
-166.35443115234375,
-166.36343383789062,
-166.37081909179688,
-166.40570068359375],
joeyism / main.py
Last active October 9, 2019 01:50
import gpt2
predictor = gpt2.Gpt2Predictor()
result = predictor.predict_json({"previous": "Toronto Raptors, who are currently tied for the league leader in wins"})
joeyism / gpt2.py
Last active October 9, 2019 01:42
from allennlp.predictors import Predictor
from pytorch_pretrained_bert.tokenization_gpt2 import GPT2Tokenizer
from pytorch_pretrained_bert.modeling_gpt2 import GPT2LMHeadModel
import torch

SMALL_MODEL = 'gpt2'
MEDIUM_MODEL = 'https://storage.googleapis.com/allennlp/models/gpt2-345M-dump'

class Gpt2Predictor(Predictor):
    """