# Stage 1: Builder/Compiler
FROM python:3.8-slim as builder
RUN apt update && \
    apt install --no-install-recommends -y build-essential gcc
COPY req.txt /req.txt
RUN pip install --no-cache-dir --user -r /req.txt
# Stage 2: Runtime
FROM nvidia/cuda:10.1-cudnn7-runtime
RUN apt update && \
    apt install --no-install-recommends -y build-essential software-properties-common && \
    add-apt-repository -y ppa:deadsnakes/ppa && \
    apt install --no-install-recommends -y python3.8 python3-pip python3-setuptools python3-distutils && \
    apt clean && rm -rf /var/lib/apt/lists/*
COPY req.txt /req.txt
COPY ./src /src
# Bring in the user-site packages built in stage 1 (both stages use Python 3.8)
COPY --from=builder /root/.local /root/.local
ENV PATH=/root/.local/bin:$PATH
CMD ["python3.8", "/src/app.py"]

# Stage 1: Builder/Compiler
FROM python:3.7-slim as builder
RUN apt update && \
    apt install --no-install-recommends -y build-essential gcc
COPY req.txt /req.txt
RUN pip install --no-cache-dir --user -r /req.txt
# Stage 2: Runtime
FROM debian:buster-slim
# buster ships Python 3.7 (same as the builder), so the copied user-site packages resolve
RUN apt update && \
    apt install --no-install-recommends -y python3.7 && \
    apt clean && rm -rf /var/lib/apt/lists/*
COPY --from=builder /root/.local /root/.local
COPY ./src /src
ENV PATH=/root/.local/bin:$PATH
CMD ["python3.7", "/src/app.py"]

# Single-stage variant (no builder stage), for comparison
FROM python:3.8-slim
RUN apt update && \
    apt install --no-install-recommends -y build-essential gcc && \
    apt clean && rm -rf /var/lib/apt/lists/*
COPY ./req.txt /req.txt
COPY ./src /src
RUN pip3 install --no-cache-dir -r /req.txt
CMD ["python3", "/src/app.py"]
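
All of the Dockerfiles above run /src/app.py as their entrypoint, but the application code is not part of the snippets; a hypothetical placeholder, just to make the images buildable end to end:

# src/app.py -- hypothetical stand-in for the real application entrypoint
def main():
    print('app starting')  # real inference/serving logic would go here

if __name__ == '__main__':
    main()
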
from transformers import TFDistilBertModel, DistilBertConfig
import tensorflow as tf

distil_bert = 'distilbert-base-uncased'

config = DistilBertConfig(dropout=0.2, attention_dropout=0.2)
config.output_hidden_states = False
transformer_model = TFDistilBertModel.from_pretrained(distil_bert, config=config)

input_ids_in = tf.keras.layers.Input(shape=(128,), name='input_token', dtype='int32')
input_masks_in = tf.keras.layers.Input(shape=(128,), name='masked_token', dtype='int32')
embedding_layer = transformer_model(input_ids_in, attention_mask=input_masks_in)[0]
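
The snippet stops at the raw DistilBERT hidden states. One way to finish the model, sketched under the assumption of a Bi-LSTM pooling head and a 6-class softmax output (the layer sizes are illustrative, not taken from the gist):

# Illustrative classification head on top of `embedding_layer` from above
X = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(50, return_sequences=True))(embedding_layer)
X = tf.keras.layers.GlobalMaxPool1D()(X)
X = tf.keras.layers.Dense(50, activation='relu')(X)
X = tf.keras.layers.Dropout(0.2)(X)
output = tf.keras.layers.Dense(6, activation='softmax')(X)
model = tf.keras.Model(inputs=[input_ids_in, input_masks_in], outputs=output)

# Freeze the pre-trained DistilBERT weights so only the new head trains
for layer in model.layers[:3]:
    layer.trainable = False
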
from transformers import TFDistilBertForSequenceClassification, DistilBertConfig
import tensorflow as tf

distil_bert = 'distilbert-base-uncased'

config = DistilBertConfig(num_labels=6)
config.output_hidden_states = False
transformer_model = TFDistilBertForSequenceClassification.from_pretrained(distil_bert, config=config)

input_ids = tf.keras.layers.Input(shape=(128,), name='input_token', dtype='int32')
input_masks = tf.keras.layers.Input(shape=(128,), name='masked_token', dtype='int32')
# The TF sequence-classification model returns a tuple whose first element is the logits
logits = transformer_model(input_ids, attention_mask=input_masks)[0]
model = tf.keras.Model(inputs=[input_ids, input_masks], outputs=logits)
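
A typical compile step for the six-class model above (standard Keras API; the learning rate is an illustrative choice). SparseCategoricalCrossentropy with from_logits=True matches the raw logits this head returns, assuming integer class labels:

model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=3e-5),
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    metrics=['accuracy'],
)
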
from transformers import DistilBertTokenizer, RobertaTokenizer

distil_bert = 'distilbert-base-uncased'  # Pick any desired pre-trained model
roberta = 'roberta-base'  # RoBERTa has no uncased checkpoint

# Defining DistilBERT tokenizer
tokenizer = DistilBertTokenizer.from_pretrained(distil_bert, do_lower_case=True, add_special_tokens=True,
                                                max_length=128, pad_to_max_length=True)

# Defining RoBERTa tokenizer (case-sensitive, so no lower-casing)
tokenizer = RobertaTokenizer.from_pretrained(roberta, add_special_tokens=True,
                                             max_length=128, pad_to_max_length=True)
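
The 128-token input_token and masked_token tensors used in the model snippets come from encoding raw text. A minimal helper, assuming the older encode_plus API that matches the pad_to_max_length argument above (the function name and NumPy packaging are mine, not the gist's):

import numpy as np

def tokenize(sentences, tokenizer):
    # Encode each sentence to fixed-length ids plus an attention mask
    input_ids, input_masks = [], []
    for sentence in sentences:
        encoded = tokenizer.encode_plus(sentence, add_special_tokens=True,
                                        max_length=128, pad_to_max_length=True,
                                        return_attention_mask=True)
        input_ids.append(encoded['input_ids'])
        input_masks.append(encoded['attention_mask'])
    return np.asarray(input_ids, dtype='int32'), np.asarray(input_masks, dtype='int32')
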
from tensorflow.keras.layers import Input

# depth should be 9n+2 (e.g. 56 or 110)
# Model definition
num_filters_in = 32
num_res_block = int((depth - 2) / 9)
inputs = Input(shape=input_shape)
# ResNet V2 performs a Conv2D on the input before splitting into two paths
X = residual_block(X=inputs, num_filters=num_filters_in, conv_first=True)
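
residual_block, depth, and input_shape are defined elsewhere in the full gist and would precede the fragment above. A minimal sketch of what they might look like, modelled on the standard Keras ResNet V2 layer (Conv/BN/ReLU ordering toggled by conv_first); everything here is an assumption, not the gist's code:

import tensorflow as tf
from tensorflow.keras.layers import Conv2D, BatchNormalization, Activation

depth = 56                 # satisfies depth = 9n + 2
input_shape = (32, 32, 3)  # e.g. CIFAR-10 images

def residual_block(X, num_filters, kernel_size=3, strides=1,
                   activation='relu', batch_norm=True, conv_first=True):
    # Conv -> BN -> ReLU when conv_first, else pre-activation BN -> ReLU -> Conv
    conv = Conv2D(num_filters, kernel_size=kernel_size, strides=strides,
                  padding='same', kernel_initializer='he_normal',
                  kernel_regularizer=tf.keras.regularizers.l2(1e-4))
    if conv_first:
        X = conv(X)
        if batch_norm:
            X = BatchNormalization()(X)
        if activation is not None:
            X = Activation(activation)(X)
    else:
        if batch_norm:
            X = BatchNormalization()(X)
        if activation is not None:
            X = Activation(activation)(X)
        X = conv(X)
    return X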