class TrainingDataGenerator(tf.keras.utils.Sequence):
    def __init__(self,
                 meta_data_map_path,
                 image_folder,
                 batch_size=1,
                 recalculate_batch_idx=False,
                 shuffle_within_batch=True,
                 rescale=1./255,
                 interpolation="bilinear"):
        # Minimal assumed body: store the configuration on the instance.
        self.meta_data_map_path = meta_data_map_path
        self.image_folder = image_folder
        self.batch_size = batch_size
        self.recalculate_batch_idx = recalculate_batch_idx
        self.shuffle_within_batch = shuffle_within_batch
        self.rescale = rescale
        self.interpolation = interpolation
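A `tf.keras.utils.Sequence` subclass must also implement `__len__` and `__getitem__`. A minimal sketch of what those could look like here; `self.entries` (a list assumed to be built from `meta_data_map_path`) and `self.load_image` (an assumed helper reading from `image_folder`) are hypothetical names, not part of the original class:

import math
import numpy as np

def __len__(self):
    # One step per batch of metadata entries (hypothetical attribute).
    return math.ceil(len(self.entries) / self.batch_size)

def __getitem__(self, idx):
    batch = self.entries[idx * self.batch_size:(idx + 1) * self.batch_size]
    # `load_image` is an assumed helper: read from `self.image_folder`,
    # resize with `self.interpolation`, multiply by `self.rescale`.
    x = np.stack([self.load_image(entry) for entry in batch])
    y = np.array([entry["label"] for entry in batch])
    return x, y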
import math
import numpy as np
from tensorflow.keras.utils import Sequence

class CIFAR10Sequence(Sequence):
    def __init__(self, x_set, y_set, batch_size):
        self.x, self.y = x_set, y_set
        self.batch_size = batch_size

    def __len__(self):
        return math.ceil(len(self.x) / self.batch_size)

    def __getitem__(self, idx):
        # Slice out batch `idx` of samples and labels.
        batch_x = self.x[idx * self.batch_size:(idx + 1) * self.batch_size]
        batch_y = self.y[idx * self.batch_size:(idx + 1) * self.batch_size]
        return np.array(batch_x), np.array(batch_y)
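Since `model.fit` accepts a `Sequence` directly, usage is one line; the model and the `x_train`/`y_train` arrays below are illustrative:

train_gen = CIFAR10Sequence(x_train, y_train, batch_size=32)
model.fit(train_gen, epochs=10)  # Keras derives steps per epoch from __len__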
@keras_export('keras.layers.Attention')
class Attention(BaseDenseAttention):
    """Dot-product attention layer, a.k.a. Luong-style attention.

    Inputs are a `query` tensor of shape `[batch_size, Tq, dim]`, a `value`
    tensor of shape `[batch_size, Tv, dim]` and a `key` tensor of shape
    `[batch_size, Tv, dim]`. The calculation follows the steps:

    1. Calculate scores with shape `[batch_size, Tq, Tv]` as a `query`-`key`
       dot product: `scores = tf.matmul(query, key, transpose_b=True)`.
    2. Use scores to calculate a distribution with shape
       `[batch_size, Tq, Tv]`: `distribution = tf.nn.softmax(scores)`.
    3. Use `distribution` to create a linear combination of `value` with
       shape `[batch_size, Tq, dim]`: `return tf.matmul(distribution, value)`.
    """
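A quick shape check of the layer with random stand-in tensors; when no `key` is passed, it defaults to `value`:

import tensorflow as tf

query = tf.random.normal([2, 4, 8])   # [batch_size, Tq, dim]
value = tf.random.normal([2, 6, 8])   # [batch_size, Tv, dim]

attention = tf.keras.layers.Attention()
output = attention([query, value])    # key defaults to value
print(output.shape)                   # (2, 4, 8), i.e. [batch_size, Tq, dim]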
@keras_export('keras.layers.AdditiveAttention')
class AdditiveAttention(BaseDenseAttention):
    """Additive attention layer, a.k.a. Bahdanau-style attention.

    Inputs are a `query` tensor of shape `[batch_size, Tq, dim]`, a `value`
    tensor of shape `[batch_size, Tv, dim]` and a `key` tensor of shape
    `[batch_size, Tv, dim]`. The calculation follows the steps:

    1. Reshape `query` and `value` into shapes `[batch_size, Tq, 1, dim]`
       and `[batch_size, 1, Tv, dim]` respectively.
    2. Calculate scores with shape `[batch_size, Tq, Tv]` as a non-linear
       sum: `scores = tf.reduce_sum(tf.tanh(query + value), axis=-1)`.
    3. Use scores to calculate a distribution with shape
       `[batch_size, Tq, Tv]`: `distribution = tf.nn.softmax(scores)`.
    4. Use `distribution` to create a linear combination of `value` with
       shape `[batch_size, Tq, dim]`: `return tf.matmul(distribution, value)`.
    """
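The broadcasting behind steps 1-2 can be reproduced by hand; this is an illustrative sketch with random tensors, not the layer's actual implementation (which also applies a learned scale vector before the sum):

import tensorflow as tf

query = tf.random.normal([2, 4, 8])               # [batch_size, Tq, dim]
value = tf.random.normal([2, 6, 8])               # [batch_size, Tv, dim]

q = tf.expand_dims(query, 2)                      # [batch_size, Tq, 1, dim]
v = tf.expand_dims(value, 1)                      # [batch_size, 1, Tv, dim]
scores = tf.reduce_sum(tf.tanh(q + v), axis=-1)   # [batch_size, Tq, Tv]
distribution = tf.nn.softmax(scores)              # attention weights
output = tf.matmul(distribution, value)           # [batch_size, Tq, dim]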
def call(self,
         inputs,
         mask=None,
         training=None,
         initial_state=None,
         constants=None):
    # Normalize `initial_state` and `constants` to lists so the rest of
    # the call can treat a single tensor and a list uniformly.
    if not isinstance(initial_state, (list, tuple, type(None))):
        initial_state = [initial_state]
    if not isinstance(constants, (list, tuple, type(None))):
        constants = [constants]
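This normalization is what lets callers hand over a single tensor instead of a list. An LSTM, for instance, carries two state tensors, so its `initial_state` is a two-element list (shapes below are illustrative):

import tensorflow as tf

lstm = tf.keras.layers.LSTM(16, return_state=True)
x = tf.random.normal([2, 5, 8])    # [batch, timesteps, features]
h0 = tf.zeros([2, 16])             # initial hidden state
c0 = tf.zeros([2, 16])             # initial cell state
out, h, c = lstm(x, initial_state=[h0, c0])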
def call(self, inputs, states, training=None):
    # Build the input dropout mask once and cache it; `count=4`
    # produces one mask per LSTM gate (i, f, c, o).
    if 0 < self.dropout < 1 and self._dropout_mask is None:
        self._dropout_mask = _generate_dropout_mask(
            K.ones_like(inputs),
            self.dropout,
            training=training,
            count=4)
    # Likewise for the recurrent state h.
    if (0 < self.recurrent_dropout < 1 and
            self._recurrent_dropout_mask is None):
        self._recurrent_dropout_mask = _generate_dropout_mask(
            K.ones_like(states[0]),
            self.recurrent_dropout,
            training=training,
            count=4)
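The caching matters because recurrent dropout must drop the same units at every timestep; regenerating the mask each step would defeat that. A minimal sketch of the idea using the public `tf.nn.dropout` instead of the private `_generate_dropout_mask` helper:

import tensorflow as tf

ones = tf.ones([2, 16])               # [batch, units], illustrative shape
mask = tf.nn.dropout(ones, rate=0.5)  # scaled binary mask, built once
# Reusing `mask` at every timestep keeps the dropped units consistent
# across the whole sequence, which is what the cached mask above ensures.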
def build(self, input_shape):
    input_dim = input_shape[-1]
    if type(self.recurrent_initializer).__name__ == 'Identity':
        # np.identity is square only, so tile the identity block along
        # the second axis to fill the (units, 4 * units) recurrent kernel.
        def recurrent_identity(shape, gain=1., dtype=None):
            del dtype
            return gain * np.concatenate(
                [np.identity(shape[0])] * (shape[1] // shape[0]), axis=1)

        self.recurrent_initializer = recurrent_identity
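The tiling is needed because `np.identity` only yields square matrices, while an LSTM's recurrent kernel has shape `(units, 4 * units)`, one block per gate. A quick check of what `recurrent_identity` produces:

import numpy as np

units = 3
kernel = np.concatenate([np.identity(units)] * 4, axis=1)
print(kernel.shape)   # (3, 12): one identity block per LSTM gate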
import pandas as pd
from spacy.tokenizer import Tokenizer
from spacy.lang.en import English
import gensim
from gensim import corpora, models
from gensim.models import Word2Vec

# A blank English pipeline plus a rule-free tokenizer over its vocab.
nlp = English()
tokenizer = Tokenizer(nlp.vocab)
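With no prefix, suffix, or infix rules supplied, this `Tokenizer` splits on whitespace only, so punctuation stays attached to its word (sample sentence assumed):

doc = tokenizer("Word embeddings capture distributional semantics.")
print([token.text for token in doc])
# ['Word', 'embeddings', 'capture', 'distributional', 'semantics.']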
def buildModelWord2Vec(docs, numberOfFeatures=100, windowSize=10, minimumFrequency=5):
    # Note: gensim >= 4.0 renamed the `size` argument to `vector_size`.
    model = Word2Vec(
        docs,
        workers=4,
        size=numberOfFeatures,
        min_count=minimumFrequency,
        window=windowSize
    )
    return model
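A small usage sketch with toy sentences, assuming gensim 3.x to match the `size` argument above; `min_count=1` keeps the tiny vocabulary from being filtered out:

docs = [["deep", "learning", "with", "keras"],
        ["word", "embeddings", "with", "gensim"]]
model = buildModelWord2Vec(docs, numberOfFeatures=50, windowSize=5,
                           minimumFrequency=1)
vector = model.wv["keras"]   # 50-dimensional embedding for "keras"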