@jbencook · Created December 26, 2020 20:40
In [1]:
import tensorflow as tf
print(tf.__version__)

2.4.0
In [2]:
class DoubleLinearLayer(tf.keras.layers.Layer):
    def __init__(self, n_units=8):
        super().__init__()
        self.n_units = n_units

    def build(self, input_shape):
        # Variables are created lazily, once the input shape is known.
        self.weights1 = self.add_weight(
            "weights1",
            shape=(int(input_shape[-1]), self.n_units),
            initializer=tf.keras.initializers.RandomNormal(),
        )
        self.weights2 = self.add_weight(
            "weights2",
            shape=(self.n_units, self.n_units),
            initializer=tf.keras.initializers.RandomNormal(),
        )

    def call(self, inputs):
        # Two chained linear transforms, no bias and no activation.
        x = tf.matmul(inputs, self.weights1)
        return tf.matmul(x, self.weights2)
In [3]:
layer = DoubleLinearLayer()
x = tf.ones((3, 100))
layer(x)

Out[3]:
<tf.Tensor: shape=(3, 8), dtype=float32, numpy=
array([[ 0.08985905, -0.01076644, -0.03135672, -0.01940718, -0.04436477,
        -0.02511168, -0.0466373 , -0.01953002],
       [ 0.08985905, -0.01076644, -0.03135672, -0.01940718, -0.04436477,
        -0.02511168, -0.0466373 , -0.01953002],
       [ 0.08985905, -0.01076644, -0.03135672, -0.01940718, -0.04436477,
        -0.02511168, -0.0466373 , -0.01953002]], dtype=float32)>
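Not in the original gist, but a quick way to confirm what build() produced: after the first call the layer reports itself as built and exposes the two kernels it created. `built` and `trainable_weights` are standard Keras layer attributes, so this is only an illustrative check:

print(layer.built)                                  # True: build() ran on the first call
print([w.shape for w in layer.trainable_weights])   # expect (100, 8) and (8, 8)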
In [4]:
class DoubleLinearLayer2(tf.keras.layers.Layer):
    def __init__(self, n_units=8):
        super().__init__()
        # Same computation, expressed by composing two bias-free Dense layers.
        self.dense1 = tf.keras.layers.Dense(n_units, use_bias=False)
        self.dense2 = tf.keras.layers.Dense(n_units, use_bias=False)

    def call(self, inputs):
        x = self.dense1(inputs)
        return self.dense2(x)
In [5]:
layer2 = DoubleLinearLayer2()
layer2(x)

Out[5]:
<tf.Tensor: shape=(3, 8), dtype=float32, numpy=
array([[-0.15208174, -1.8050282 ,  0.6666308 ,  1.520147  ,  0.14671874,
        -0.36840513, -2.1613884 ,  1.089746  ],
       [-0.15208174, -1.8050282 ,  0.6666308 ,  1.520147  ,  0.14671874,
        -0.36840513, -2.1613884 ,  1.089746  ],
       [-0.15208174, -1.8050282 ,  0.6666308 ,  1.520147  ,  0.14671874,
        -0.36840513, -2.1613884 ,  1.089746  ]], dtype=float32)>
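A side benefit of the composed version, shown here as an added sketch rather than part of the original notebook: Keras tracks the variables of the nested Dense layers automatically, so the outer layer never calls add_weight itself:

print(len(layer2.trainable_weights))                 # 2: one kernel per Dense sublayer
print([w.shape for w in layer2.trainable_weights])   # expect (100, 8) and (8, 8)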
In [6]:
class CustomClassifier(tf.keras.Model):
    def __init__(self):
        super().__init__()
        # ResNet50 backbone without its classification head
        # (loads ImageNet weights by default).
        self.resnet = tf.keras.applications.ResNet50(include_top=False)
        self.flatten = tf.keras.layers.Flatten()
        # 10-way softmax classification head.
        self.head = tf.keras.layers.Dense(10, activation="softmax")

    def call(self, inputs):
        x = self.resnet(inputs)
        x = self.flatten(x)
        return self.head(x)
In [7]:
x2 = tf.ones((12, 224, 224, 3))
model = CustomClassifier()
model(x2)

Out[7]:
<tf.Tensor: shape=(12, 10), dtype=float32, numpy=
array([[0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271],
       [0.00857573, 0.51075023, 0.0278917 , 0.04193351, 0.12776485,
        0.0630367 , 0.00453431, 0.01734107, 0.12475921, 0.07341271]],
      dtype=float32)>
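Because CustomClassifier subclasses tf.keras.Model, it can be compiled and fit like any built-in model. The lines below are an added sketch, not part of the original gist; `y_fake` is a made-up batch of integer labels for the twelve dummy images:

import numpy as np

y_fake = np.random.randint(0, 10, size=(12,))  # hypothetical labels for 10 classes

model.compile(
    optimizer="adam",
    loss="sparse_categorical_crossentropy",
    metrics=["accuracy"],
)
model.fit(x2, y_fake, epochs=1, batch_size=4)  # one quick pass over the dummy batch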
Notebook kernel: Python 3 (version 3.6.5).