@mikigom
Last active April 26, 2017 09:50
Project Nagne DL seminar code
32.502345269453031 31.70700584656992
53.426804033275019 68.77759598163891
61.530358025636438 62.562382297945803
47.475639634786098 71.546632233567777
59.813207869512318 87.230925133687393
55.142188413943821 78.211518270799232
52.211796692214001 79.64197304980874
39.299566694317065 59.171489321869508
48.10504169176825 75.331242297063056
52.550014442733818 71.300879886850353
45.419730144973755 55.165677145959123
54.351634881228918 82.478846757497919
44.164049496773352 62.008923245725825
58.16847071685779 75.392870425994957
56.727208057096611 81.43619215887864
48.955888566093719 60.723602440673965
44.687196231480904 82.892503731453715
60.297326851333466 97.379896862166078
45.618643772955828 48.847153317355072
38.816817537445637 56.877213186268506
66.189816606752601 83.878564664602763
65.41605174513407 118.59121730252249
47.48120860786787 57.251819462268969
41.57564261748702 51.391744079832307
51.84518690563943 75.380651665312357
59.370822011089523 74.765564032151374
57.31000343834809 95.455052922574737
63.615561251453308 95.229366017555307
46.737619407976972 79.052406169565586
50.556760148547767 83.432071421323712
52.223996085553047 63.358790317497878
35.567830047746632 41.412885303700563
42.436476944055642 76.617341280074044
58.16454011019286 96.769566426108199
57.504447615341789 74.084130116602523
45.440530725319981 66.588144414228594
61.89622268029126 77.768482417793024
33.093831736163963 50.719588912312084
36.436009511386871 62.124570818071781
37.675654860850742 60.810246649902211
44.555608383275356 52.682983366387781
43.318282631865721 58.569824717692867
50.073145632289034 82.905981485070512
43.870612645218372 61.424709804339123
62.997480747553091 115.24415280079529
32.669043763467187 45.570588823376085
40.166899008703702 54.084054796223612
53.575077531673656 87.994452758110413
33.864214971778239 52.725494375900425
64.707138666121296 93.576118692658241
38.119824026822805 80.166275447370964
44.502538064645101 65.101711570560326
40.599538384552318 65.562301260400375
41.720676356341293 65.280886920822823
51.088634678336796 73.434641546324301
55.078095904923202 71.13972785861894
41.377726534895203 79.102829683549857
62.494697427269791 86.520538440347153
49.203887540826003 84.742697807826218
41.102685187349664 59.358850248624933
41.182016105169822 61.684037524833627
50.186389494880601 69.847604158249183
52.378446219236217 86.098291205774103
50.135485486286122 59.108839267699643
33.644706006191782 69.89968164362763
39.557901222906828 44.862490711164398
56.130388816875467 85.498067778840223
57.362052133238237 95.536686846467219
60.269214393997906 70.251934419771587
35.678093889410732 52.721734964774988
31.588116998132829 50.392670135079896
53.66093226167304 63.642398775657753
46.682228649471917 72.247251068662365
43.107820219102464 57.812512976181402
70.34607561504933 104.25710158543822
44.492855880854073 86.642020318822006
57.50453330326841 91.486778000110135
36.930076609191808 55.231660886212836
55.805733357942742 79.550436678507609
38.954769073377065 44.847124242467601
56.901214702247074 80.207523139682763
56.868900661384046 83.14274979204346
34.33312470421609 55.723489260543914
59.04974121466681 77.634182511677864
57.788223993230673 99.051414841748269
54.282328705967409 79.120646274680027
51.088719898979143 69.588897851118475
50.282836348230731 69.510503311494389
44.211741752090113 73.687564318317285
38.005488008060688 61.366904537240131
32.940479942618296 67.170655768995118
53.691639571070056 85.668203145001542
68.76573426962166 114.85387123391394
46.230966498310252 90.123572069967423
68.319360818255362 97.919821035242848
50.030174340312143 81.536990783015028
49.239765342753763 72.111832469615663
50.039575939875988 85.232007342325673
48.149858891028863 66.224957888054632
25.128484647772304 53.454394214850524
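
The block above is a plain two-column dataset: 100 space-separated "x y" pairs, one per line (the capture does not preserve the gist's file names). A minimal sketch of fitting a line to it by batch gradient descent, assuming it is saved locally under the hypothetical name data.txt:

import numpy

# 'data.txt' is a hypothetical filename for the two-column block above.
data = numpy.loadtxt('data.txt')        # shape (100, 2)
x, y = data[:, 0], data[:, 1]

w, b = 0.0, 0.0                          # fit y ~ w*x + b
lr = 0.0001                              # small step, since x is roughly 25-70
for epoch in range(1000):
    error = (w * x + b) - y
    w -= lr * numpy.mean(error * x)      # gradient of 0.5 * mean(error**2) w.r.t. w
    b -= lr * numpy.mean(error)          # gradient of 0.5 * mean(error**2) w.r.t. b

print('w = %.3f, b = %.3f' % (w, b))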
{
"cells": [
{
"cell_type": "code",
"execution_count": 41,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"{'divide': 'ignore', 'invalid': 'ignore', 'over': 'ignore', 'under': 'ignore'}"
]
},
"execution_count": 41,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# https://gist.github.com/yusugomori/4462221\n",
"\n",
"#!/usr/bin/env python\n",
"# -*- coding: utf-8 -*-\n",
"\n",
"'''\n",
" Logistic Regression\n",
" \n",
" References :\n",
" - Jason Rennie: Logistic Regression,\n",
" http://qwone.com/~jason/writing/lr.pdf\n",
" \n",
" - DeepLearningTutorials\n",
" https://github.com/lisa-lab/DeepLearningTutorials\n",
"'''\n",
"\n",
"import sys\n",
"import numpy\n",
"numpy.seterr(all='ignore')"
]
},
{
"cell_type": "code",
"execution_count": 42,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def sigmoid(x):\n",
" return 1. / (1 + numpy.exp(-x))\n",
"\n",
"def softmax(x):\n",
" e = numpy.exp(x - numpy.max(x)) # prevent overflow\n",
" if e.ndim == 1:\n",
" return e / numpy.sum(e, axis=0)\n",
" else: \n",
" return e / numpy.array([numpy.sum(e, axis=1)]).T # ndim = 2"
]
},
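{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# A sketch added for clarity, not a cell from the original gist: a quick\n",
"# check that both branches of softmax() return probabilities summing to 1.\n",
"# Subtracting numpy.max(x) leaves the output unchanged but keeps\n",
"# numpy.exp from overflowing for large inputs.\n",
"print numpy.sum(softmax(numpy.array([1., 2., 3.])))    # 1-D branch: expect 1.0\n",
"print numpy.sum(softmax(numpy.ones((4, 3))), axis=1)   # 2-D branch: expect [1. 1. 1. 1.]"
]
},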
{
"cell_type": "code",
"execution_count": 43,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"class LogisticRegression(object):\n",
" def __init__(self, input, label, n_in, n_out):\n",
" self.x = input\n",
" self.y = label\n",
" self.W = numpy.zeros((n_in, n_out)) # initialize W 0\n",
" self.b = numpy.zeros(n_out) # initialize bias 0\n",
"\n",
" # self.params = [self.W, self.b]\n",
"\n",
" def train(self, lr, input=None):\n",
" if input is not None:\n",
" self.x = input\n",
"\n",
" # p_y_given_x = sigmoid(numpy.dot(self.x, self.W) + self.b)\n",
" p_y_given_x = softmax(numpy.dot(self.x, self.W) + self.b)\n",
" d_y = self.y - p_y_given_x\n",
" \n",
" self.W += numpy.dot(self.x.T, d_y)\n",
" self.b += numpy.mean(d_y, axis=0)\n",
" \n",
" # cost = self.negative_log_likelihood()\n",
" # return cost\n",
"\n",
" def negative_log_likelihood(self):\n",
" # sigmoid_activation = sigmoid(numpy.dot(self.x, self.W) + self.b)\n",
" sigmoid_activation = softmax(numpy.dot(self.x, self.W) + self.b)\n",
"\n",
" cross_entropy = - numpy.mean(\n",
" numpy.sum(self.y * numpy.log(sigmoid_activation) +\n",
" (1 - self.y) * numpy.log(1 - sigmoid_activation),\n",
" axis=1))\n",
"\n",
" return cross_entropy\n",
"\n",
"\n",
" def predict(self, x):\n",
" # return sigmoid(numpy.dot(x, self.W) + self.b)\n",
" return softmax(numpy.dot(x, self.W) + self.b)"
]
},
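{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# A sketch added for clarity, not a cell from the original gist: why\n",
"# train() can use d_y = y - p directly. For softmax probabilities p and\n",
"# a one-hot target y, the gradient of the cross-entropy -sum(y * log(p))\n",
"# with respect to the logits is p - y; with z = x0.W that gives\n",
"# dL/dW = outer(x0, p - y0), so y - p is the ascent direction on the\n",
"# log-likelihood. A finite-difference spot check of one weight entry:\n",
"x0 = numpy.array([1., 0., 1., 0., 0., 0.])\n",
"y0 = numpy.array([1., 0., 0.])\n",
"W0 = numpy.random.randn(6, 3) * 0.01\n",
"loss = lambda W: -numpy.sum(y0 * numpy.log(softmax(numpy.dot(x0, W))))\n",
"analytic = numpy.outer(x0, softmax(numpy.dot(x0, W0)) - y0)[0, 0]\n",
"eps = 1e-6\n",
"W1 = W0.copy()\n",
"W1[0, 0] += eps\n",
"print (loss(W1) - loss(W0)) / eps, analytic  # the two values should agree"
]
},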
{
"cell_type": "code",
"execution_count": 48,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def test_lr(learning_rate=0.01, n_epochs=200):\n",
" # training data\n",
" x = numpy.array([[1,1,1,0,0,0],\n",
" [1,0,1,0,0,0],\n",
" [1,1,1,0,0,0],\n",
" [0,0,1,1,1,0],\n",
" [0,0,1,1,0,0],\n",
" [0,0,1,1,1,0],\n",
" [0,0,0,0,0,1],\n",
" [0,0,0,0,1,1],\n",
" [0,0,0,1,1,1]])\n",
" y = numpy.array([[1, 0, 0],\n",
" [1, 0, 0],\n",
" [1, 0, 0],\n",
" [0, 1, 0],\n",
" [0, 1, 0],\n",
" [0, 1, 0],\n",
" [0, 0, 1],\n",
" [0, 0, 1],\n",
" [0, 0, 1]])\n",
"\n",
"\n",
" # construct LogisticRegression\n",
" classifier = LogisticRegression(input=x, label=y, n_in=6, n_out=3)\n",
"\n",
" # train\n",
" for epoch in xrange(n_epochs):\n",
" classifier.train(lr=learning_rate)\n",
" cost = classifier.negative_log_likelihood()\n",
" print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost\n",
" learning_rate *= 0.95\n",
"\n",
"\n",
" # test\n",
" x = numpy.array([1, 1, 0, 0, 0, 0])\n",
" print >> sys.stderr, classifier.predict(x)\n",
" \n",
" x = numpy.array([0, 0.1, 0.8, 0.9, 0, 0])\n",
" print >> sys.stderr, classifier.predict(x)\n",
" \n",
" x = numpy.array([0, 0, 0, 0.4, 0.85, 1])\n",
" print >> sys.stderr, classifier.predict(x)"
]
},
{
"cell_type": "code",
"execution_count": 49,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Training epoch 0, cost is 0.135533012285\n",
"Training epoch 1, cost is 0.0665156794653\n",
"Training epoch 2, cost is 0.0568679520828\n",
"Training epoch 3, cost is 0.0500187297872\n",
"Training epoch 4, cost is 0.0447724811729\n",
"Training epoch 5, cost is 0.040580788612\n",
"Training epoch 6, cost is 0.0371372816802\n",
"Training epoch 7, cost is 0.034250363384\n",
"Training epoch 8, cost is 0.0317914035782\n",
"Training epoch 9, cost is 0.0296697730648\n",
"Training epoch 10, cost is 0.0278193249187\n",
"Training epoch 11, cost is 0.0261904467019\n",
"Training epoch 12, cost is 0.0247450997888\n",
"Training epoch 13, cost is 0.0234535801145\n",
"Training epoch 14, cost is 0.0222923268322\n",
"Training epoch 15, cost is 0.0212423967869\n",
"Training epoch 16, cost is 0.020288376856\n",
"Training epoch 17, cost is 0.0194175925471\n",
"Training epoch 18, cost is 0.0186195219131\n",
"Training epoch 19, cost is 0.0178853547312\n",
"Training epoch 20, cost is 0.01720765633\n",
"Training epoch 21, cost is 0.0165801080217\n",
"Training epoch 22, cost is 0.0159973044213\n",
"Training epoch 23, cost is 0.0154545935567\n",
"Training epoch 24, cost is 0.014947949549\n",
"Training epoch 25, cost is 0.0144738703503\n",
"Training epoch 26, cost is 0.0140292949515\n",
"Training epoch 27, cost is 0.0136115358541\n",
"Training epoch 28, cost is 0.0132182236149\n",
"Training epoch 29, cost is 0.0128472610096\n",
"Training epoch 30, cost is 0.0124967849212\n",
"Training epoch 31, cost is 0.0121651344709\n",
"Training epoch 32, cost is 0.0118508242292\n",
"Training epoch 33, cost is 0.0115525215845\n",
"Training epoch 34, cost is 0.0112690275332\n",
"Training epoch 35, cost is 0.0109992603033\n",
"Training epoch 36, cost is 0.0107422413335\n",
"Training epoch 37, cost is 0.0104970832211\n",
"Training epoch 38, cost is 0.0102629793239\n",
"Training epoch 39, cost is 0.0100391947557\n",
"Training epoch 40, cost is 0.00982505856325\n",
"Training epoch 41, cost is 0.00961995690552\n",
"Training epoch 42, cost is 0.0094233270905\n",
"Training epoch 43, cost is 0.00923465234463\n",
"Training epoch 44, cost is 0.00905345721233\n",
"Training epoch 45, cost is 0.00887930349885\n",
"Training epoch 46, cost is 0.00871178668259\n",
"Training epoch 47, cost is 0.00855053273484\n",
"Training epoch 48, cost is 0.0083951952939\n",
"Training epoch 49, cost is 0.00824545314815\n",
"Training epoch 50, cost is 0.00810100798943\n",
"Training epoch 51, cost is 0.00796158240339\n",
"Training epoch 52, cost is 0.00782691806812\n",
"Training epoch 53, cost is 0.00769677413626\n",
"Training epoch 54, cost is 0.0075709257791\n",
"Training epoch 55, cost is 0.00744916287412\n",
"Training epoch 56, cost is 0.00733128881963\n",
"Training epoch 57, cost is 0.00721711946244\n",
"Training epoch 58, cost is 0.00710648212611\n",
"Training epoch 59, cost is 0.00699921472897\n",
"Training epoch 60, cost is 0.00689516498234\n",
"Training epoch 61, cost is 0.00679418966064\n",
"Training epoch 62, cost is 0.0066961539358\n",
"Training epoch 63, cost is 0.00660093076963\n",
"Training epoch 64, cost is 0.0065084003583\n",
"Training epoch 65, cost is 0.00641844962365\n",
"Training epoch 66, cost is 0.00633097174697\n",
"Training epoch 67, cost is 0.00624586574107\n",
"Training epoch 68, cost is 0.00616303605699\n",
"Training epoch 69, cost is 0.00608239222218\n",
"Training epoch 70, cost is 0.00600384850723\n",
"Training epoch 71, cost is 0.00592732361856\n",
"Training epoch 72, cost is 0.00585274041468\n",
"Training epoch 73, cost is 0.00578002564403\n",
"Training epoch 74, cost is 0.00570910970244\n",
"Training epoch 75, cost is 0.00563992640855\n",
"Training epoch 76, cost is 0.00557241279558\n",
"Training epoch 77, cost is 0.00550650891826\n",
"Training epoch 78, cost is 0.00544215767338\n",
"Training epoch 79, cost is 0.00537930463313\n",
"Training epoch 80, cost is 0.00531789788995\n",
"Training epoch 81, cost is 0.00525788791209\n",
"Training epoch 82, cost is 0.00519922740903\n",
"Training epoch 83, cost is 0.00514187120596\n",
"Training epoch 84, cost is 0.00508577612652\n",
"Training epoch 85, cost is 0.00503090088341\n",
"Training epoch 86, cost is 0.004977205976\n",
"Training epoch 87, cost is 0.00492465359457\n",
"Training epoch 88, cost is 0.00487320753065\n",
"Training epoch 89, cost is 0.00482283309306\n",
"Training epoch 90, cost is 0.00477349702908\n",
"Training epoch 91, cost is 0.00472516745059\n",
"Training epoch 92, cost is 0.00467781376465\n",
"Training epoch 93, cost is 0.00463140660836\n",
"Training epoch 94, cost is 0.00458591778748\n",
"Training epoch 95, cost is 0.00454132021887\n",
"Training epoch 96, cost is 0.00449758787615\n",
"Training epoch 97, cost is 0.0044546957386\n",
"Training epoch 98, cost is 0.00441261974299\n",
"Training epoch 99, cost is 0.00437133673815\n",
"Training epoch 100, cost is 0.00433082444209\n",
"Training epoch 101, cost is 0.00429106140152\n",
"Training epoch 102, cost is 0.00425202695366\n",
"Training epoch 103, cost is 0.0042137011901\n",
"Training epoch 104, cost is 0.00417606492261\n",
"Training epoch 105, cost is 0.00413909965087\n",
"Training epoch 106, cost is 0.00410278753183\n",
"Training epoch 107, cost is 0.00406711135076\n",
"Training epoch 108, cost is 0.0040320544938\n",
"Training epoch 109, cost is 0.00399760092191\n",
"Training epoch 110, cost is 0.00396373514622\n",
"Training epoch 111, cost is 0.00393044220462\n",
"Training epoch 112, cost is 0.00389770763946\n",
"Training epoch 113, cost is 0.00386551747651\n",
"Training epoch 114, cost is 0.00383385820486\n",
"Training epoch 115, cost is 0.00380271675787\n",
"Training epoch 116, cost is 0.003772080495\n",
"Training epoch 117, cost is 0.00374193718461\n",
"Training epoch 118, cost is 0.00371227498749\n",
"Training epoch 119, cost is 0.00368308244125\n",
"Training epoch 120, cost is 0.00365434844542\n",
"Training epoch 121, cost is 0.00362606224729\n",
"Training epoch 122, cost is 0.0035982134283\n",
"Training epoch 123, cost is 0.00357079189125\n",
"Training epoch 124, cost is 0.0035437878479\n",
"Training epoch 125, cost is 0.00351719180728\n",
"Training epoch 126, cost is 0.00349099456445\n",
"Training epoch 127, cost is 0.0034651871898\n",
"Training epoch 128, cost is 0.00343976101883\n",
"Training epoch 129, cost is 0.00341470764234\n",
"Training epoch 130, cost is 0.00339001889713\n",
"Training epoch 131, cost is 0.00336568685701\n",
"Training epoch 132, cost is 0.0033417038243\n",
"Training epoch 133, cost is 0.00331806232159\n",
"Training epoch 134, cost is 0.00329475508395\n",
"Training epoch 135, cost is 0.00327177505142\n",
"Training epoch 136, cost is 0.00324911536179\n",
"Training epoch 137, cost is 0.00322676934377\n",
"Training epoch 138, cost is 0.00320473051033\n",
"Training epoch 139, cost is 0.00318299255243\n",
"Training epoch 140, cost is 0.00316154933291\n",
"Training epoch 141, cost is 0.00314039488067\n",
"Training epoch 142, cost is 0.00311952338513\n",
"Training epoch 143, cost is 0.0030989291908\n",
"Training epoch 144, cost is 0.00307860679219\n",
"Training epoch 145, cost is 0.00305855082882\n",
"Training epoch 146, cost is 0.00303875608051\n",
"Training epoch 147, cost is 0.00301921746279\n",
"Training epoch 148, cost is 0.00299993002251\n",
"Training epoch 149, cost is 0.00298088893364\n",
"Training epoch 150, cost is 0.0029620894932\n",
"Training epoch 151, cost is 0.00294352711738\n",
"Training epoch 152, cost is 0.00292519733774\n",
"Training epoch 153, cost is 0.00290709579766\n",
"Training epoch 154, cost is 0.00288921824879\n",
"Training epoch 155, cost is 0.00287156054777\n",
"Training epoch 156, cost is 0.00285411865297\n",
"Training epoch 157, cost is 0.00283688862138\n",
"Training epoch 158, cost is 0.00281986660562\n",
"Training epoch 159, cost is 0.00280304885106\n",
"Training epoch 160, cost is 0.00278643169305\n",
"Training epoch 161, cost is 0.0027700115542\n",
"Training epoch 162, cost is 0.00275378494186\n",
"Training epoch 163, cost is 0.00273774844554\n",
"Training epoch 164, cost is 0.00272189873458\n",
"Training epoch 165, cost is 0.00270623255577\n",
"Training epoch 166, cost is 0.00269074673115\n",
"Training epoch 167, cost is 0.00267543815582\n",
"Training epoch 168, cost is 0.00266030379587\n",
"Training epoch 169, cost is 0.00264534068632\n",
"Training epoch 170, cost is 0.00263054592924\n",
"Training epoch 171, cost is 0.00261591669178\n",
"Training epoch 172, cost is 0.00260145020441\n",
"Training epoch 173, cost is 0.00258714375917\n",
"Training epoch 174, cost is 0.0025729947079\n",
"Training epoch 175, cost is 0.00255900046065\n",
"Training epoch 176, cost is 0.00254515848408\n",
"Training epoch 177, cost is 0.0025314662999\n",
"Training epoch 178, cost is 0.00251792148341\n",
"Training epoch 179, cost is 0.00250452166204\n",
"Training epoch 180, cost is 0.00249126451394\n",
"Training epoch 181, cost is 0.00247814776667\n",
"Training epoch 182, cost is 0.00246516919585\n",
"Training epoch 183, cost is 0.00245232662394\n",
"Training epoch 184, cost is 0.00243961791895\n",
"Training epoch 185, cost is 0.00242704099332\n",
"Training epoch 186, cost is 0.00241459380272\n",
"Training epoch 187, cost is 0.00240227434497\n",
"Training epoch 188, cost is 0.00239008065892\n",
"Training epoch 189, cost is 0.00237801082343\n",
"Training epoch 190, cost is 0.00236606295637\n",
"Training epoch 191, cost is 0.00235423521358\n",
"Training epoch 192, cost is 0.00234252578797\n",
"Training epoch 193, cost is 0.00233093290857\n",
"Training epoch 194, cost is 0.0023194548396\n",
"Training epoch 195, cost is 0.00230808987968\n",
"Training epoch 196, cost is 0.0022968363609\n",
"Training epoch 197, cost is 0.00228569264804\n",
"Training epoch 198, cost is 0.00227465713776\n",
"Training epoch 199, cost is 0.00226372825784\n",
"[ 9.99058274e-01 8.68662900e-05 8.54859850e-04]\n",
"[ 0.00436986 0.99442857 0.00120157]\n",
"[ 2.38821403e-05 3.78696876e-04 9.99597421e-01]\n"
]
}
],
"source": [
"if __name__ == \"__main__\":\n",
" test_lr()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
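
The notebook targets Python 2 (xrange, print >> sys.stderr, the python2 kernelspec). A minimal sketch of the changes needed to run the driver under Python 3, assuming the sigmoid/softmax helpers and the LogisticRegression class from the notebook are already defined (they run unchanged):

import sys

def test_lr(learning_rate=0.01, n_epochs=200):
    # ... build x, y, and classifier exactly as in the notebook ...
    for epoch in range(n_epochs):                      # xrange -> range
        classifier.train(lr=learning_rate)
        cost = classifier.negative_log_likelihood()
        print('Training epoch %d, cost is %s' % (epoch, cost),
              file=sys.stderr)                         # print >> -> print(..., file=...)
        learning_rate *= 0.95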