@rvinas
rvinas / 53105294_implementing-a-batch-dependent-loss-in-keras.py
Last active November 8, 2018 16:15
Solution for "Implementing a batch dependent loss in Keras" (StackOverflow)
# StackOverflow question: https://stackoverflow.com/questions/53105294/implementing-a-batch-dependent-loss-in-keras
from keras.utils import Sequence
from keras.models import Model
from keras.layers import Input, Dense
import keras.backend as K
import numpy as np
# Constants
input_dim = 64 # digits.data.shape[1]
dataset = np.random.rand(1000, input_dim) # TODO: replace with digits.data
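
The preview above stops at the constants, but the gist title states the goal: a loss that depends on statistics of the whole batch rather than on each sample in isolation. Below is a minimal, illustrative sketch of one way to do that in Keras; it is not the gist's exact solution. The name batch_dependent_loss, the toy autoencoder model, and the batch-mean penalty term are assumptions made for this example. Since Keras passes the full batch tensors to the loss function, batch-level statistics can be computed with backend ops, provided complete batches are fed (which is what a Sequence generator, as imported above, is typically used for).

import numpy as np
import keras.backend as K
from keras.models import Model
from keras.layers import Input, Dense

input_dim = 64
batch_size = 100
dataset = np.random.rand(1000, input_dim)

def batch_dependent_loss(y_true, y_pred):
    # Standard per-sample MSE...
    per_sample = K.mean(K.square(y_pred - y_true), axis=-1)
    # ...plus a term that depends on the *entire batch*: the squared distance
    # between the batch mean of the predictions and the batch mean of the targets.
    batch_term = K.mean(K.square(K.mean(y_pred, axis=0) - K.mean(y_true, axis=0)))
    return per_sample + batch_term

# Toy autoencoder-style model, just to have something to compile the loss against
inputs = Input(shape=(input_dim,))
hidden = Dense(32, activation='relu')(inputs)
outputs = Dense(input_dim, activation='linear')(hidden)
model = Model(inputs, outputs)
model.compile(optimizer='adam', loss=batch_dependent_loss)

# Train on complete batches only, so the batch statistics are always computed
# over the same number of samples
n = (len(dataset) // batch_size) * batch_size
model.fit(dataset[:n], dataset[:n], batch_size=batch_size, epochs=1)

If the batch-level quantity cannot be expressed from y_true and y_pred alone, an alternative is to attach it to the model with model.add_loss, which accepts an arbitrary tensor built from the model's inputs and outputs.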
@rvinas
rvinas / language_model.py
Last active April 11, 2023 14:58
Computing the probability of occurrence of a sentence with an LSTM model using Keras
# StackOverflow question: https://stackoverflow.com/questions/51123481/how-to-build-a-language-model-using-lstm-that-assigns-probability-of-occurence-f
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.layers import Embedding, LSTM, Dense
from keras.models import Sequential
import numpy as np
def prepare_sentence(seq, maxlen):
# Pads seq and slides windows
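
The preview cuts off inside prepare_sentence, so here is a hedged, self-contained sketch of the idea the gist describes: train the LSTM to predict the next word from its pre-padded left context, then score a sentence by multiplying the model's probabilities of each actual next word. The toy corpus, the layer sizes, and the body of prepare_sentence shown here are illustrative assumptions, not necessarily the gist's exact code.

import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dense

# Toy corpus (illustrative)
data = ["Two little dicky birds",
        "Sat on a wall,",
        "One called Peter,",
        "One called Paul."]

tokenizer = Tokenizer()
tokenizer.fit_on_texts(data)
vocab = len(tokenizer.word_index) + 1      # +1 because index 0 is reserved for padding
seqs = tokenizer.texts_to_sequences(data)
maxlen = max(len(s) for s in seqs)

def prepare_sentence(seq, maxlen):
    # Pads seq and slides windows: for each position i, the input is the
    # pre-padded prefix seq[:i] and the target is the next word seq[i]
    x, y = [], []
    for i, w in enumerate(seq):
        x.append(pad_sequences([seq[:i]], maxlen=maxlen - 1, padding='pre')[0])
        y.append(w)
    return np.array(x), np.array(y)

# Build the next-word training set from every sentence
pairs = [prepare_sentence(s, maxlen) for s in seqs]
x_train = np.vstack([p[0] for p in pairs])
y_train = np.concatenate([p[1] for p in pairs])

# Next-word prediction model
model = Sequential()
model.add(Embedding(vocab, 10, input_length=maxlen - 1))
model.add(LSTM(32))
model.add(Dense(vocab, activation='softmax'))
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy')
model.fit(x_train, y_train, epochs=300, verbose=0)

# P(sentence) = product over i of P(word_i | words before i)
sentence = "One called Peter,"
seq = tokenizer.texts_to_sequences([sentence])[0]
x_test, y_test = prepare_sentence(seq, maxlen)
word_probs = model.predict(x_test)[np.arange(len(y_test)), y_test]
print('P(sentence) =', word_probs.prod())

Because the product of many probabilities shrinks quickly with sentence length, in practice the per-word log-probabilities are usually summed instead of multiplying the raw probabilities.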