@uni-3 · Created July 28, 2020 15:00
Neural matrix factorization (NeuMF) with TensorFlow

from dataclasses import dataclass

import tensorflow as tf


# https://www.tensorflow.org/tutorials/quickstart/advanced
@dataclass
class NeuMF:
    n_user: int
    n_item: int
    params: dict

    def __post_init__(self):
        self.model = self.construct_model()
    def _construct_layers(self, user_input, item_input) -> tf.keras.Model:
        n_user = self.n_user
        n_item = self.n_item
        params = self.params

        model_layers = params["model_layers"]
        mf_regularization = params["mf_regularization"]
        mlp_reg_layers = params["mlp_reg_layers"]
        # Latent dimension of the matrix factorization embeddings
        mf_dim = params["mf_dim"]

        # The MLP input concatenates a user and an item embedding of equal
        # size, so the first layer width must be even.
        if model_layers[0] % 2 != 0:
            raise ValueError("The first layer size should be a multiple of 2!")

        embedding_initializer = "glorot_uniform"
        # Embedding layers: the MF and MLP branches learn separate embeddings
        mf_embedding_user = tf.keras.layers.Embedding(
            input_dim=n_user, output_dim=mf_dim, name="mf_embedding_user",
            embeddings_initializer=embedding_initializer,
            embeddings_regularizer=tf.keras.regularizers.l2(mf_regularization),
            input_length=1)(user_input)
        mf_embedding_item = tf.keras.layers.Embedding(
            input_dim=n_item, output_dim=mf_dim, name="mf_embedding_item",
            embeddings_initializer=embedding_initializer,
            embeddings_regularizer=tf.keras.regularizers.l2(mf_regularization),
            input_length=1)(item_input)
        # The MLP embeddings take their L2 weight from mlp_reg_layers[0]
        mlp_embedding_user = tf.keras.layers.Embedding(
            input_dim=n_user, output_dim=model_layers[0] // 2,
            name="mlp_embedding_user",
            embeddings_initializer=embedding_initializer,
            embeddings_regularizer=tf.keras.regularizers.l2(mlp_reg_layers[0]),
            input_length=1)(user_input)
        mlp_embedding_item = tf.keras.layers.Embedding(
            input_dim=n_item, output_dim=model_layers[0] // 2,
            name="mlp_embedding_item",
            embeddings_initializer=embedding_initializer,
            embeddings_regularizer=tf.keras.regularizers.l2(mlp_reg_layers[0]),
            input_length=1)(item_input)
        # MF branch: element-wise product of user and item embeddings
        mf_vector = tf.keras.layers.multiply([mf_embedding_user, mf_embedding_item])

        # MLP branch: concatenated embeddings passed through a stack of dense layers
        mlp_vector = tf.keras.layers.concatenate([mlp_embedding_user, mlp_embedding_item])
        num_layer = len(model_layers)  # Number of layers in the MLP
        for layer in range(1, num_layer):
            model_layer = tf.keras.layers.Dense(
                model_layers[layer],
                kernel_regularizer=tf.keras.regularizers.l2(mlp_reg_layers[layer]),
                activation="relu")
            mlp_vector = model_layer(mlp_vector)

        # Concatenate the MF and MLP branches
        predict_vector = tf.keras.layers.concatenate([mf_vector, mlp_vector])

        # Prediction layer; the sigmoid makes this a probability, not a logit
        prediction = tf.keras.layers.Dense(
            1, activation="sigmoid",
            kernel_initializer="lecun_uniform",
            name="rating")(predict_vector)

        return tf.keras.Model(inputs=[user_input, item_input],
                              outputs=prediction)
    def construct_model(self) -> tf.keras.Model:
        """Constructs and returns the model."""
        # Scalar integer IDs per example; use shape=(1,) for 2-D inputs instead
        user_input = tf.keras.layers.Input(
            shape=(),
            name="user_id",
            dtype=tf.int32,
        )
        item_input = tf.keras.layers.Input(
            shape=(),
            name="item_id",
            dtype=tf.int32,
        )
        base_model = self._construct_layers(user_input, item_input)

        # Re-wrap the base model so it accepts a dict of named inputs
        model = tf.keras.Model(
            inputs={
                "item_id": item_input,
                "user_id": user_input,
            },
            outputs=base_model.output)
        return model
    def load_saved_model(self, path="./models"):
        """Replaces self.model with a model loaded from `path`."""
        self.model = tf.keras.models.load_model(path)
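

# A minimal usage sketch: builds the model with illustrative hyperparameters
# and fits it on random dummy data. All values below (user/item counts,
# dimensions, layer sizes, regularization weights, optimizer, loss) are
# assumptions for demonstration; tune them for a real dataset.
if __name__ == "__main__":
    import numpy as np

    params = {
        "mf_dim": 8,                        # MF embedding dimension (assumed)
        "mf_regularization": 0.0,           # L2 weight for the MF embeddings
        "model_layers": [16, 8, 4],         # MLP widths; the first must be even
        "mlp_reg_layers": [0.0, 0.0, 0.0],  # One L2 weight per MLP layer
    }
    neumf = NeuMF(n_user=1000, n_item=2000, params=params)
    neumf.model.summary()

    # Random implicit-feedback data: label 1 = interaction, 0 = negative sample
    n = 256
    inputs = {
        "user_id": np.random.randint(0, 1000, size=(n,), dtype=np.int32),
        "item_id": np.random.randint(0, 2000, size=(n,), dtype=np.int32),
    }
    labels = np.random.randint(0, 2, size=(n, 1)).astype(np.float32)

    # Sigmoid output pairs with binary cross-entropy for implicit feedback
    neumf.model.compile(optimizer="adam", loss="binary_crossentropy",
                        metrics=["accuracy"])
    neumf.model.fit(inputs, labels, batch_size=32, epochs=1)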