Skip to content

Instantly share code, notes, and snippets.

@Melihemin
Last active March 17, 2021 12:38
Show Gist options
  • Save Melihemin/df54f8ba75e1b59bd61b54f56ce266c7 to your computer and use it in GitHub Desktop.
Deep Learning Tutorial - 01
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "churn.ipynb",
"provenance": [],
"collapsed_sections": [],
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/Melihemin/df54f8ba75e1b59bd61b54f56ce266c7/churn.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 419
},
"id": "e9VsaBcLKjJ_",
"outputId": "7ad904e9-f27a-46cc-e270-1a315b19bb91"
},
"source": [
"import numpy as np\r\n",
"import pandas as pd\r\n",
"\r\n",
"# Load the bank-churn dataset straight from the remote CSV.\r\n",
"veriler = pd.read_csv('https://bilkav.com/Churn_Modelling.csv')\r\n",
"\r\n",
"# Bare last expression -> rich HTML display of the frame.\r\n",
"veriler"
],
"execution_count": 2,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>RowNumber</th>\n",
" <th>CustomerId</th>\n",
" <th>Surname</th>\n",
" <th>CreditScore</th>\n",
" <th>Geography</th>\n",
" <th>Gender</th>\n",
" <th>Age</th>\n",
" <th>Tenure</th>\n",
" <th>Balance</th>\n",
" <th>NumOfProducts</th>\n",
" <th>HasCrCard</th>\n",
" <th>IsActiveMember</th>\n",
" <th>EstimatedSalary</th>\n",
" <th>Exited</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>15634602</td>\n",
" <td>Hargrave</td>\n",
" <td>619</td>\n",
" <td>France</td>\n",
" <td>Female</td>\n",
" <td>42</td>\n",
" <td>2</td>\n",
" <td>0.00</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>101348.88</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>15647311</td>\n",
" <td>Hill</td>\n",
" <td>608</td>\n",
" <td>Spain</td>\n",
" <td>Female</td>\n",
" <td>41</td>\n",
" <td>1</td>\n",
" <td>83807.86</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" <td>112542.58</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>3</td>\n",
" <td>15619304</td>\n",
" <td>Onio</td>\n",
" <td>502</td>\n",
" <td>France</td>\n",
" <td>Female</td>\n",
" <td>42</td>\n",
" <td>8</td>\n",
" <td>159660.80</td>\n",
" <td>3</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>113931.57</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4</td>\n",
" <td>15701354</td>\n",
" <td>Boni</td>\n",
" <td>699</td>\n",
" <td>France</td>\n",
" <td>Female</td>\n",
" <td>39</td>\n",
" <td>1</td>\n",
" <td>0.00</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>93826.63</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5</td>\n",
" <td>15737888</td>\n",
" <td>Mitchell</td>\n",
" <td>850</td>\n",
" <td>Spain</td>\n",
" <td>Female</td>\n",
" <td>43</td>\n",
" <td>2</td>\n",
" <td>125510.82</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>79084.10</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9995</th>\n",
" <td>9996</td>\n",
" <td>15606229</td>\n",
" <td>Obijiaku</td>\n",
" <td>771</td>\n",
" <td>France</td>\n",
" <td>Male</td>\n",
" <td>39</td>\n",
" <td>5</td>\n",
" <td>0.00</td>\n",
" <td>2</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>96270.64</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9996</th>\n",
" <td>9997</td>\n",
" <td>15569892</td>\n",
" <td>Johnstone</td>\n",
" <td>516</td>\n",
" <td>France</td>\n",
" <td>Male</td>\n",
" <td>35</td>\n",
" <td>10</td>\n",
" <td>57369.61</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>101699.77</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9997</th>\n",
" <td>9998</td>\n",
" <td>15584532</td>\n",
" <td>Liu</td>\n",
" <td>709</td>\n",
" <td>France</td>\n",
" <td>Female</td>\n",
" <td>36</td>\n",
" <td>7</td>\n",
" <td>0.00</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" <td>42085.58</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9998</th>\n",
" <td>9999</td>\n",
" <td>15682355</td>\n",
" <td>Sabbatini</td>\n",
" <td>772</td>\n",
" <td>Germany</td>\n",
" <td>Male</td>\n",
" <td>42</td>\n",
" <td>3</td>\n",
" <td>75075.31</td>\n",
" <td>2</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>92888.52</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9999</th>\n",
" <td>10000</td>\n",
" <td>15628319</td>\n",
" <td>Walker</td>\n",
" <td>792</td>\n",
" <td>France</td>\n",
" <td>Female</td>\n",
" <td>28</td>\n",
" <td>4</td>\n",
" <td>130142.79</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>38190.78</td>\n",
" <td>0</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>10000 rows × 14 columns</p>\n",
"</div>"
],
"text/plain": [
" RowNumber CustomerId Surname ... IsActiveMember EstimatedSalary Exited\n",
"0 1 15634602 Hargrave ... 1 101348.88 1\n",
"1 2 15647311 Hill ... 1 112542.58 0\n",
"2 3 15619304 Onio ... 0 113931.57 1\n",
"3 4 15701354 Boni ... 0 93826.63 0\n",
"4 5 15737888 Mitchell ... 1 79084.10 0\n",
"... ... ... ... ... ... ... ...\n",
"9995 9996 15606229 Obijiaku ... 0 96270.64 0\n",
"9996 9997 15569892 Johnstone ... 1 101699.77 0\n",
"9997 9998 15584532 Liu ... 1 42085.58 1\n",
"9998 9999 15682355 Sabbatini ... 0 92888.52 1\n",
"9999 10000 15628319 Walker ... 0 38190.78 0\n",
"\n",
"[10000 rows x 14 columns]"
]
},
"metadata": {
"tags": []
},
"execution_count": 2
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "F4GJEOjeAe7t"
},
"source": [
"from sklearn.preprocessing import LabelEncoder, OneHotEncoder\r\n",
"from sklearn.compose import ColumnTransformer\r\n",
"\r\n",
"# BUGFIX: X and Y were never defined in this notebook (leftover hidden\r\n",
"# state from a deleted cell). Rebuild them from the loaded frame:\r\n",
"# features = CreditScore..EstimatedSalary (cols 3-12), target = Exited (col 13).\r\n",
"X = veriler.iloc[:, 3:13].values\r\n",
"Y = veriler.iloc[:, 13].values\r\n",
"\r\n",
"# Encode the binary Gender column (index 2 of X) to 0/1.\r\n",
"le = LabelEncoder()\r\n",
"X[:, 2] = le.fit_transform(X[:, 2])\r\n",
"\r\n",
"# One-hot encode Geography (index 1 of X), then drop the first dummy\r\n",
"# column to avoid the dummy-variable trap -> 11 features, matching the\r\n",
"# network's input_dim=11 below.\r\n",
"ohe = ColumnTransformer([(\"ohe\", OneHotEncoder(dtype=float), [1])], remainder='passthrough')\r\n",
"X = ohe.fit_transform(X)\r\n",
"X = X[:,1:]"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "NhrJucTHHuin"
},
"source": [
"from sklearn.model_selection import train_test_split\r\n",
"x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.33, random_state=0)\r\n",
"\r\n",
"from sklearn.preprocessing import StandardScaler\r\n",
"sc = StandardScaler()\r\n",
"\r\n",
"# Fit the scaler on the training split only, then apply the SAME\r\n",
"# transformation to the test split. BUGFIX: the original called\r\n",
"# fit_transform on x_test, which re-fits the scaler on test statistics\r\n",
"# (data leakage) and puts train/test on inconsistent scales.\r\n",
"X_train = sc.fit_transform(x_train)\r\n",
"X_test = sc.transform(x_test)"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "NryZD2myH1ct",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "725a603e-ba4d-4926-a341-bba68415da5f"
},
"source": [
"import keras\r\n",
"from keras.models import Sequential\r\n",
"from keras.layers import Dense\r\n",
"\r\n",
"# Feed-forward binary classifier: 11 inputs -> 6 -> 6 -> 1 (sigmoid).\r\n",
"classifier = Sequential()\r\n",
"classifier.add(Dense(6, kernel_initializer=\"uniform\", activation=\"relu\", input_dim=11))\r\n",
"classifier.add(Dense(6, kernel_initializer=\"uniform\", activation=\"relu\"))\r\n",
"classifier.add(Dense(1, kernel_initializer=\"uniform\", activation=\"sigmoid\"))\r\n",
"\r\n",
"classifier.compile(optimizer='adam', loss=\"binary_crossentropy\", metrics=['accuracy'])\r\n",
"classifier.fit(X_train, y_train, epochs=80)  # epochs = number of full passes over the training set\r\n",
"\r\n",
"# Threshold the sigmoid probabilities at 0.5 to get boolean class labels.\r\n",
"y_pred = classifier.predict(X_test)\r\n",
"y_pred = (y_pred > 0.5)\r\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Epoch 1/80\n",
"210/210 [==============================] - 1s 1ms/step - loss: 0.6739 - accuracy: 0.7950\n",
"Epoch 2/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.5152 - accuracy: 0.7956\n",
"Epoch 3/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.4132 - accuracy: 0.8219\n",
"Epoch 4/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3902 - accuracy: 0.8322\n",
"Epoch 5/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3737 - accuracy: 0.8447\n",
"Epoch 6/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3724 - accuracy: 0.8443\n",
"Epoch 7/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3599 - accuracy: 0.8556\n",
"Epoch 8/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3540 - accuracy: 0.8558\n",
"Epoch 9/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3489 - accuracy: 0.8561\n",
"Epoch 10/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3448 - accuracy: 0.8634\n",
"Epoch 11/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3397 - accuracy: 0.8612\n",
"Epoch 12/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3340 - accuracy: 0.8656\n",
"Epoch 13/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3432 - accuracy: 0.8600\n",
"Epoch 14/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3447 - accuracy: 0.8593\n",
"Epoch 15/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3443 - accuracy: 0.8631\n",
"Epoch 16/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3472 - accuracy: 0.8612\n",
"Epoch 17/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3533 - accuracy: 0.8590\n",
"Epoch 18/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3312 - accuracy: 0.8647\n",
"Epoch 19/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3398 - accuracy: 0.8668\n",
"Epoch 20/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3304 - accuracy: 0.8694\n",
"Epoch 21/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3519 - accuracy: 0.8569\n",
"Epoch 22/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3271 - accuracy: 0.8642\n",
"Epoch 23/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3532 - accuracy: 0.8557\n",
"Epoch 24/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3369 - accuracy: 0.8653\n",
"Epoch 25/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3457 - accuracy: 0.8615\n",
"Epoch 26/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3364 - accuracy: 0.8601\n",
"Epoch 27/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3369 - accuracy: 0.8688\n",
"Epoch 28/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3397 - accuracy: 0.8704\n",
"Epoch 29/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3455 - accuracy: 0.8664\n",
"Epoch 30/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3450 - accuracy: 0.8617\n",
"Epoch 31/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3369 - accuracy: 0.8637\n",
"Epoch 32/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3514 - accuracy: 0.8582\n",
"Epoch 33/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3372 - accuracy: 0.8643\n",
"Epoch 34/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3239 - accuracy: 0.8742\n",
"Epoch 35/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3391 - accuracy: 0.8634\n",
"Epoch 36/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3502 - accuracy: 0.8583\n",
"Epoch 37/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3281 - accuracy: 0.8685\n",
"Epoch 38/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3462 - accuracy: 0.8587\n",
"Epoch 39/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3516 - accuracy: 0.8564\n",
"Epoch 40/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3422 - accuracy: 0.8618\n",
"Epoch 41/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3314 - accuracy: 0.8677\n",
"Epoch 42/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3460 - accuracy: 0.8592\n",
"Epoch 43/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3422 - accuracy: 0.8669\n",
"Epoch 44/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3389 - accuracy: 0.8666\n",
"Epoch 45/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3408 - accuracy: 0.8652\n",
"Epoch 46/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3332 - accuracy: 0.8679\n",
"Epoch 47/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3333 - accuracy: 0.8643\n",
"Epoch 48/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3324 - accuracy: 0.8674\n",
"Epoch 49/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3369 - accuracy: 0.8669\n",
"Epoch 50/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3447 - accuracy: 0.8606\n",
"Epoch 51/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3331 - accuracy: 0.8617\n",
"Epoch 52/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3361 - accuracy: 0.8665\n",
"Epoch 53/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3324 - accuracy: 0.8692\n",
"Epoch 54/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3397 - accuracy: 0.8651\n",
"Epoch 55/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3463 - accuracy: 0.8560\n",
"Epoch 56/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3329 - accuracy: 0.8670\n",
"Epoch 57/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3303 - accuracy: 0.8648\n",
"Epoch 58/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3416 - accuracy: 0.8597\n",
"Epoch 59/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3404 - accuracy: 0.8659\n",
"Epoch 60/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3389 - accuracy: 0.8639\n",
"Epoch 61/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3284 - accuracy: 0.8707\n",
"Epoch 62/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3487 - accuracy: 0.8590\n",
"Epoch 63/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3247 - accuracy: 0.8724\n",
"Epoch 64/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3438 - accuracy: 0.8650\n",
"Epoch 65/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3327 - accuracy: 0.8697\n",
"Epoch 66/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3380 - accuracy: 0.8661\n",
"Epoch 67/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3408 - accuracy: 0.8628\n",
"Epoch 68/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3404 - accuracy: 0.8645\n",
"Epoch 69/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3527 - accuracy: 0.8556\n",
"Epoch 70/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3342 - accuracy: 0.8675\n",
"Epoch 71/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3263 - accuracy: 0.8704\n",
"Epoch 72/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3389 - accuracy: 0.8649\n",
"Epoch 73/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3502 - accuracy: 0.8602\n",
"Epoch 74/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3474 - accuracy: 0.8561\n",
"Epoch 75/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3401 - accuracy: 0.8583\n",
"Epoch 76/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3354 - accuracy: 0.8666\n",
"Epoch 77/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3333 - accuracy: 0.8678\n",
"Epoch 78/80\n",
"210/210 [==============================] - 0s 2ms/step - loss: 0.3357 - accuracy: 0.8616\n",
"Epoch 79/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3383 - accuracy: 0.8645\n",
"Epoch 80/80\n",
"210/210 [==============================] - 0s 1ms/step - loss: 0.3445 - accuracy: 0.8641\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "WRESeDdiJp1e",
"outputId": "f8342d04-a97c-4649-ce5e-04739a13bc9a"
},
"source": [
"from sklearn.metrics import confusion_matrix\r\n",
"\r\n",
"# Rows = actual class, columns = predicted class.\r\n",
"cm = confusion_matrix(y_test, y_pred)\r\n",
"print(cm)"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"[[2533 84]\n",
" [ 379 304]]\n"
],
"name": "stdout"
}
]
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment