こしあん koshian2

@koshian2
koshian2 / ex1.py
Created May 8, 2018 15:42
Coursera Machine Learning implemented in Python - [Week 2] Simple and multiple linear regression (1) Simple linear regression
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
# Simple linear regression #
# Data
X = np.array([6.1101,5.5277,8.5186,7.0032,5.8598,8.3829,7.4764,8.5781,6.4862,5.0546,5.7107,14.164,5.734,8.4084,5.6407,5.3794,6.3654,5.1301,6.4296,7.0708,6.1891,20.27,5.4901,6.3261,5.5649,18.945,12.828,10.957,13.176,22.203,5.2524,6.5894,9.2482,5.8918,8.2111,7.9334,8.0959,5.6063,12.836,6.3534,5.4069,6.8825,11.708,5.7737,7.8247,7.0931,5.0702,5.8014,11.7,5.5416,7.5402,5.3077,7.4239,7.6031,6.3328,6.3589,6.2742,5.6397,9.3102,9.4536,8.8254,5.1793,21.279,14.908,18.959,7.2182,8.2951,10.236,5.4994,20.341,10.136,7.3345,6.0062,7.2259,5.0269,6.5479,7.5386,5.0365,10.274,5.1077,5.7292,5.1884,6.3557,9.7687,6.5159,8.5172,9.1802,6.002,5.5204,5.0594,5.7077,7.6366,5.8707,5.3054,8.2934,13.394,5.4369])
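
The preview cuts off before the regression itself. Below is a minimal, self-contained sketch of simple linear regression fit by batch gradient descent; the toy data, learning rate, and iteration count are illustrative assumptions, not values taken from the gist.

import numpy as np

# Toy data: one feature, one target (illustrative values, not the Coursera dataset)
X = np.array([6.1, 5.5, 8.5, 7.0, 5.9, 8.4, 7.5, 8.6, 6.5, 5.1])
y = np.array([17.6, 9.1, 13.7, 11.9, 6.8, 11.9, 4.3, 12.0, 6.6, 3.8])

m = len(y)
Xb = np.c_[np.ones(m), X]               # prepend the bias column
theta = np.zeros(2)
alpha, iters = 0.01, 1500               # assumed learning rate and iteration count

for _ in range(iters):
    grad = Xb.T @ (Xb @ theta - y) / m  # gradient of the mean squared-error cost
    theta -= alpha * grad

print("theta =", theta)
print("prediction for x = 7:", np.array([1.0, 7.0]) @ theta)
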
@koshian2
koshian2 / ex1_m.py
Created May 8, 2018 15:44
Coursera Machine Learning implemented in Python - [Week 2] Simple and multiple linear regression (2) Multiple linear regression
import numpy as np
import matplotlib.pyplot as plt
# Multiple linear regression #
# Data
X_data = np.array([[2104,3],[1600,3],[2400,3],[1416,2],[3000,4],[1985,4],[1534,3],[1427,3],[1380,3],[1494,3],[1940,4],[2000,3],[1890,3],[4478,5],[1268,3],[2300,4],[1320,2],[1236,3],[2609,4],[3031,4],[1767,3],[1888,2],[1604,3],[1962,4],[3890,3],[1100,3],[1458,3],[2526,3],[2200,3],[2637,3],[1839,2],[1000,1],[2040,4],[3137,3],[1811,4],[1437,3],[1239,3],[2132,4],[4215,4],[2162,4],[1664,2],[2238,3],[2567,4],[1200,3],[852,2],[1852,4],[1203,3]])
y = np.array([399900,329900,369000,232000,539900,299900,314900,198999,212000,242500,239999,347000,329999,699900,259900,449900,299900,199900,499998,599000,252900,255000,242900,259900,573900,249900,464500,469000,475000,299900,349900,169900,314900,579900,285900,249900,229900,345000,549000,287000,368500,329900,314000,299000,179900,299900,239500])
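
For the multiple-regression case, a minimal sketch with mean/std feature scaling and the normal equation is shown below. It uses only the first few rows of the gist's data, and the 1650 sq-ft, 3-bedroom prediction is the example used in the Coursera exercise; the rest is an illustrative assumption.

import numpy as np

# First few rows of the gist's data: [size in sq ft, number of bedrooms] -> price
X_data = np.array([[2104, 3], [1600, 3], [2400, 3], [1416, 2], [3000, 4]], dtype=float)
y = np.array([399900, 329900, 369000, 232000, 539900], dtype=float)

# Mean/std feature scaling, then the normal equation (closed-form least squares)
mu, sigma = X_data.mean(axis=0), X_data.std(axis=0)
X_norm = (X_data - mu) / sigma
Xb = np.c_[np.ones(len(y)), X_norm]
theta = np.linalg.pinv(Xb.T @ Xb) @ Xb.T @ y

# Predict the price of a 1650 sq-ft, 3-bedroom house
x_new = (np.array([1650, 3]) - mu) / sigma
print("predicted price:", np.r_[1.0, x_new] @ theta)
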
@koshian2
koshian2 / ex2.py
Created May 8, 2018 16:59
Coursera Machine Learning implemented in Python - [Week 3] Logistic regression (1) Unregularized logistic regression, implemented from scratch
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import fmin
X_data = np.array([[34.6236596245169,78.0246928153624],[30.286710768226,43.894997524001],[35.8474087699387,72.9021980270836],[60.1825993862097,86.3085520954682],[79.0327360507101,75.3443764369103],[45.0832774766833,56.3163717815305],[61.1066645368476,96.5114258848962],[75.0247455673888,46.5540135411653],[76.0987867022625,87.420569719268],[84.4328199612003,43.533393310721],[95.8615550709357,38.2252780579509],[75.0136583895824,30.6032632342801],[82.3070533739948,76.481963302356],[69.3645887597093,97.718691961886],[39.5383391436722,76.0368108511588],[53.9710521485623,89.207350137502],[69.0701440628302,52.7404697301676],[67.9468554771161,46.6785741067312],[70.6615095549943,92.9271378936483],[76.9787837274749,47.5759636497553],[67.3720275457087,42.8384383202917],[89.6767757507207,65.7993659274523],[50.534788289883,48.855811527642],[34.2120609778678,44.2095285986628],[77.9240914545704,68.9723599933059],[62.2710136700463,69.95445795
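
The gist minimizes the logistic cost with scipy.optimize.fmin; a small self-contained sketch of that approach follows. The toy points and 0/1 labels are illustrative only, and the cost function is the standard cross-entropy, which may differ in detail from the gist's own implementation.

import numpy as np
from scipy.optimize import fmin

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def cost(theta, X, y):
    # Cross-entropy cost for logistic regression (eps guards against log(0))
    h = sigmoid(X @ theta)
    eps = 1e-10
    return -np.mean(y * np.log(h + eps) + (1 - y) * np.log(1 - h + eps))

# Toy two-feature points with illustrative 0/1 labels (not the gist's exam-score labels)
X = np.array([[34.6, 78.0], [30.3, 43.9], [35.8, 72.9], [45.1, 56.3],
              [75.0, 46.6], [76.1, 87.4], [84.4, 43.5], [61.1, 96.5]])
y = np.array([0, 0, 0, 0, 1, 1, 1, 1])

Xb = np.c_[np.ones(len(y)), X]
theta = fmin(cost, np.zeros(Xb.shape[1]), args=(Xb, y), disp=False)  # Nelder-Mead search
print("theta =", theta)
print("train accuracy:", np.mean((sigmoid(Xb @ theta) >= 0.5) == y))
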
@koshian2
koshian2 / ex2_builtin.py
Created May 8, 2018 17:00
Coursera Machine Learning implemented in Python - [Week 3] Logistic regression (2) Unregularized logistic regression, using the built-in library
import numpy as np
from sklearn.linear_model import LogisticRegression
# Data
X_data = np.array([[34.6236596245169,78.0246928153624],[30.286710768226,43.894997524001],[35.8474087699387,72.9021980270836],[60.1825993862097,86.3085520954682],[79.0327360507101,75.3443764369103],[45.0832774766833,56.3163717815305],[61.1066645368476,96.5114258848962],[75.0247455673888,46.5540135411653],[76.0987867022625,87.420569719268],[84.4328199612003,43.533393310721],[95.8615550709357,38.2252780579509],[75.0136583895824,30.6032632342801],[82.3070533739948,76.481963302356],[69.3645887597093,97.718691961886],[39.5383391436722,76.0368108511588],[53.9710521485623,89.207350137502],[69.0701440628302,52.7404697301676],[67.9468554771161,46.6785741067312],[70.6615095549943,92.9271378936483],[76.9787837274749,47.5759636497553],[67.3720275457087,42.8384383202917],[89.6767757507207,65.7993659274523],[50.534788289883,48.855811527642],[34.2120609778678,44.2095285986628],[77.9240914545704,68.9723599933059],[62.2710136700463,69.9544579544758],
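
The built-in counterpart uses sklearn's LogisticRegression. A minimal sketch on toy data follows; since sklearn always applies an L2 penalty, a large C is one way to approximate the unregularized model the title refers to (the C actually used in the gist is not visible in the preview).

import numpy as np
from sklearn.linear_model import LogisticRegression

# Toy two-feature points with illustrative 0/1 labels
X = np.array([[34.6, 78.0], [30.3, 43.9], [35.8, 72.9], [45.1, 56.3],
              [75.0, 46.6], [76.1, 87.4], [84.4, 43.5], [61.1, 96.5]])
y = np.array([0, 0, 0, 0, 1, 1, 1, 1])

# A large C makes the built-in L2 penalty negligible,
# roughly corresponding to the unregularized model in the title
clf = LogisticRegression(C=1e6)
clf.fit(X, y)

print("intercept:", clf.intercept_, "coefficients:", clf.coef_)
print("train accuracy:", clf.score(X, y))
print("P(y=1) for [45, 85]:", clf.predict_proba([[45.0, 85.0]])[0, 1])
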
@koshian2
koshian2 / ex2_reg.py
Created May 8, 2018 17:03
Coursera Machine Learning implemented in Python - [Week 3] Logistic regression (3) Regularized logistic regression, implemented from scratch
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import fmin_bfgs
X_data = np.array([[0.051267,0.69956],[-0.092742,0.68494],[-0.21371,0.69225],[-0.375,0.50219],[-0.51325,0.46564],[-0.52477,0.2098],[-0.39804,0.034357],[-0.30588,-0.19225],[0.016705,-0.40424],[0.13191,-0.51389],[0.38537,-0.56506],[0.52938,-0.5212],[0.63882,-0.24342],[0.73675,-0.18494],[0.54666,0.48757],[0.322,0.5826],[0.16647,0.53874],[-0.046659,0.81652],[-0.17339,0.69956],[-0.47869,0.63377],[-0.60541,0.59722],[-0.62846,0.33406],[-0.59389,0.005117],[-0.42108,-0.27266],[-0.11578,-0.39693],[0.20104,-0.60161],[0.46601,-0.53582],[0.67339,-0.53582],[-0.13882,0.54605],[-0.29435,0.77997],[-0.26555,0.96272],[-0.16187,0.8019],[-0.17339,0.64839],[-0.28283,0.47295],[-0.36348,0.31213],[-0.30012,0.027047],[-0.23675,-0.21418],[-0.06394,-0.18494],[0.062788,-0.16301],[0.22984,-0.41155],[0.2932,-0.2288],[0.48329,-0.18494],[0.64459,-0.14108],[0.46025,0.012427],[0.6273,0.15863],[0.57546,0.26827],[0.72523,0.44371],[0.22408,0.52412],[0.44297,
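
A minimal sketch of regularized logistic regression from scratch is given below: polynomial feature mapping, a cost with an L2 penalty that skips the bias term, and fmin_bfgs for the minimization, matching the gist's import. The feature degree, lambda, and toy ring-shaped data are assumptions for illustration.

import numpy as np
from scipy.optimize import fmin_bfgs

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def map_features(x1, x2, degree=3):
    # Polynomial feature expansion; the degree here is an assumption
    cols = [np.ones_like(x1)]
    for i in range(1, degree + 1):
        for j in range(i + 1):
            cols.append(x1 ** (i - j) * x2 ** j)
    return np.stack(cols, axis=1)

def cost_reg(theta, X, y, lam):
    h = sigmoid(X @ theta)
    eps = 1e-10
    penalty = lam / (2 * len(y)) * np.sum(theta[1:] ** 2)  # bias term is not penalized
    return -np.mean(y * np.log(h + eps) + (1 - y) * np.log(1 - h + eps)) + penalty

# Toy ring-shaped data: points near the origin are labeled 1
rng = np.random.default_rng(0)
X_raw = rng.uniform(-1, 1, size=(60, 2))
y = (np.sum(X_raw ** 2, axis=1) < 0.5).astype(float)

X = map_features(X_raw[:, 0], X_raw[:, 1])
theta = fmin_bfgs(cost_reg, np.zeros(X.shape[1]), args=(X, y, 1.0), disp=False)
print("train accuracy:", np.mean((sigmoid(X @ theta) >= 0.5) == y))
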
@koshian2
koshian2 / ex2_reg_builtin.py
Created May 8, 2018 17:04
Coursera Machine Learning implemented in Python - [Week 3] Logistic regression (4) Regularized logistic regression, using the built-in library
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
X_data = np.array([[0.051267,0.69956],[-0.092742,0.68494],[-0.21371,0.69225],[-0.375,0.50219],[-0.51325,0.46564],[-0.52477,0.2098],[-0.39804,0.034357],[-0.30588,-0.19225],[0.016705,-0.40424],[0.13191,-0.51389],[0.38537,-0.56506],[0.52938,-0.5212],[0.63882,-0.24342],[0.73675,-0.18494],[0.54666,0.48757],[0.322,0.5826],[0.16647,0.53874],[-0.046659,0.81652],[-0.17339,0.69956],[-0.47869,0.63377],[-0.60541,0.59722],[-0.62846,0.33406],[-0.59389,0.005117],[-0.42108,-0.27266],[-0.11578,-0.39693],[0.20104,-0.60161],[0.46601,-0.53582],[0.67339,-0.53582],[-0.13882,0.54605],[-0.29435,0.77997],[-0.26555,0.96272],[-0.16187,0.8019],[-0.17339,0.64839],[-0.28283,0.47295],[-0.36348,0.31213],[-0.30012,0.027047],[-0.23675,-0.21418],[-0.06394,-0.18494],[0.062788,-0.16301],[0.22984,-0.41155],[0.2932,-0.2288],[0.48329,-0.18494],[0.64459,-0.14108],[0.46025,0.012427],[0.6273,0.15863],[0.57546,0.26827],[0.72523,0.44371],[0.22408,0.52
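
The built-in version can be sketched with PolynomialFeatures plus LogisticRegression, where C is the inverse of the regularization strength; the degree and C below are illustrative choices, not values from the gist.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import PolynomialFeatures

# Toy ring-shaped data: points near the origin are labeled 1
rng = np.random.default_rng(0)
X_raw = rng.uniform(-1, 1, size=(60, 2))
y = (np.sum(X_raw ** 2, axis=1) < 0.5).astype(int)

# Polynomial expansion plus sklearn's built-in L2 penalty;
# C is the inverse of the regularization strength lambda
X_poly = PolynomialFeatures(degree=3, include_bias=False).fit_transform(X_raw)
clf = LogisticRegression(C=1.0)
clf.fit(X_poly, y)
print("train accuracy:", clf.score(X_poly, y))
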
@koshian2
koshian2 / ex3.py
Created May 8, 2018 17:17
Coursera Machine Learning implemented in Python - [Week 4] Neural networks (1) [1] Multi-class classification, implemented from scratch
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
# Load the data
def load_data1():
    data = loadmat("ex3data1")
    # In the original data, y is a 5000x1 matrix, so convert it to a vector
    return np.array(data['X']), np.ravel(np.array(data['y']))
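
The preview stops at the data loader. A compact sketch of one-vs-all multi-class classification (one regularized logistic classifier per label, prediction by the highest sigmoid score) is given below on a tiny synthetic problem; the gist itself trains on the 5000x400 ex3data1.mat digit images.

import numpy as np
from scipy.optimize import fmin_bfgs

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def cost_reg(theta, X, y, lam):
    h = sigmoid(X @ theta)
    eps = 1e-10
    penalty = lam / (2 * len(y)) * np.sum(theta[1:] ** 2)
    return -np.mean(y * np.log(h + eps) + (1 - y) * np.log(1 - h + eps)) + penalty

def one_vs_all(X, y, num_labels, lam=0.1):
    # Train one regularized logistic classifier per class label
    Xb = np.c_[np.ones(len(y)), X]
    thetas = np.zeros((num_labels, Xb.shape[1]))
    for k in range(num_labels):
        thetas[k] = fmin_bfgs(cost_reg, thetas[k], args=(Xb, (y == k).astype(float), lam), disp=False)
    return thetas

def predict_one_vs_all(thetas, X):
    # Pick the class whose classifier gives the highest probability
    Xb = np.c_[np.ones(len(X)), X]
    return np.argmax(sigmoid(Xb @ thetas.T), axis=1)

# Tiny three-class synthetic problem (the gist uses the 5000x400 ex3data1.mat digits)
rng = np.random.default_rng(0)
X = np.vstack([rng.normal(c, 0.5, size=(20, 2)) for c in ([0, 0], [3, 0], [0, 3])])
y = np.repeat([0, 1, 2], 20)

thetas = one_vs_all(X, y, num_labels=3)
print("train accuracy:", np.mean(predict_one_vs_all(thetas, X) == y))
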
@koshian2
koshian2 / ex3_builtin.py
Created May 8, 2018 17:18
Coursera Machine Learning implemented in Python - [Week 4] Neural networks (1) [2] Multi-class classification, using the built-in library
import numpy as np
from scipy.io import loadmat
from sklearn.linear_model import LogisticRegression
# Load the data
def load_data1():
    data = loadmat("ex3data1")
    # In the original data, y is a 5000x1 matrix, so convert it to a vector
    return np.array(data['X']), np.ravel(np.array(data['y']))
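
For the built-in version, sklearn's LogisticRegression handles the multi-class case internally. A minimal sketch reusing the gist's loader is below; it assumes ex3data1.mat is available in the working directory, and the C and max_iter values are illustrative.

import numpy as np
from scipy.io import loadmat
from sklearn.linear_model import LogisticRegression

def load_data1():
    # Same loader as the gist: y arrives as a 5000x1 matrix, so flatten it to a vector
    data = loadmat("ex3data1")
    return np.array(data['X']), np.ravel(np.array(data['y']))

X, y = load_data1()  # assumes ex3data1.mat is in the working directory

# sklearn handles the multi-class case internally (one-vs-rest or multinomial,
# depending on version and solver); C and max_iter here are illustrative choices
clf = LogisticRegression(C=10.0, max_iter=1000)
clf.fit(X, y)
print("train accuracy:", clf.score(X, y))
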
@koshian2
koshian2 / ex3_nn.py
Created May 8, 2018 17:20
Coursera Machine Learning implemented in Python - [Week 4] Neural networks (1) [3] Neural network, implemented from scratch
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
# Load the data
def load_data1():
    data = loadmat("ex3data1")
    # In the original data, y is a 5000x1 matrix, so convert it to a vector
    return np.array(data['X']), np.ravel(np.array(data['y']))
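
This exercise runs forward propagation through a pre-trained one-hidden-layer network (the 25x401 and 10x26 weight matrices from ex3weights.mat). A minimal sketch of that feed-forward prediction is below, using small random matrices just to show the shapes.

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def predict(Theta1, Theta2, X):
    # Forward propagation through one hidden layer:
    # input -> hidden (sigmoid) -> output (sigmoid), with a bias unit prepended at each layer
    a1 = np.c_[np.ones(len(X)), X]
    a2 = np.c_[np.ones(len(X)), sigmoid(a1 @ Theta1.T)]
    a3 = sigmoid(a2 @ Theta2.T)
    return np.argmax(a3, axis=1)

# Small random weights just to show the shapes; the exercise instead loads the
# trained 25x401 and 10x26 matrices from ex3weights.mat
rng = np.random.default_rng(0)
Theta1 = rng.normal(size=(5, 4))   # 3 inputs + bias -> 5 hidden units
Theta2 = rng.normal(size=(2, 6))   # 5 hidden units + bias -> 2 outputs
X = rng.normal(size=(8, 3))
print("predicted classes:", predict(Theta1, Theta2, X))
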
@koshian2
koshian2 / ex3_nn_builtin.py
Created May 8, 2018 17:21
Coursera Machine Learning implemented in Python - [Week 4] Neural networks (1) [4] Neural network, using the built-in library
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
from sklearn.neural_network import MLPClassifier
# Load the data
def load_data1():
    data = loadmat("ex3data1")
    # In the original data, y is a 5000x1 matrix, so convert it to a vector
    return np.array(data['X']), np.ravel(np.array(data['y']))
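
The built-in counterpart is sklearn's MLPClassifier. A minimal sketch on a tiny synthetic problem follows; the single hidden layer of 25 logistic units mirrors the Coursera architecture, while the solver and iteration count are assumptions.

import numpy as np
from sklearn.neural_network import MLPClassifier

# Tiny three-class synthetic problem; the gist trains on the 5000x400 ex3data1.mat digits
rng = np.random.default_rng(0)
X = np.vstack([rng.normal(c, 0.5, size=(30, 2)) for c in ([0, 0], [3, 0], [0, 3])])
y = np.repeat([0, 1, 2], 30)

# One hidden layer of 25 logistic units mirrors the Coursera architecture;
# the solver and iteration count are assumptions
clf = MLPClassifier(hidden_layer_sizes=(25,), activation='logistic',
                    solver='lbfgs', max_iter=1000, random_state=0)
clf.fit(X, y)
print("train accuracy:", clf.score(X, y))
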