
@icehongssii
Last active March 15, 2019 05:54
Testing linear regression for deep learning: fit H(x) = Wx + b to x_train = y_train = [1, 2, 3] by gradient descent on the mean squared error (TensorFlow 1.x).
import tensorflow as tf

# Training data: y = x, so the model should learn W ~ 1, b ~ 0
x_train = [1, 2, 3]
y_train = [1, 2, 3]

# Hypothesis: H(x) = Wx + b
W = tf.Variable(tf.random_normal([1]), name="weight")
b = tf.Variable(tf.random_normal([1]), name="bias")
hypothesis = x_train * W + b

# cost(W, b): mean of the squared differences (mean squared error)
cost = tf.reduce_mean(tf.square(hypothesis - y_train))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)  # becomes a single node in the graph

# Create a session to run the graph
sess = tf.Session()
# W and b must be initialized before use
sess.run(tf.global_variables_initializer())

# Running the train node updates W and b; cost, hypothesis, W, and b can all be run too
for step in range(2001):
    sess.run(train)
    if step % 20 == 0:
        print(step, sess.run(cost), sess.run(W), sess.run(b))
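
As a side note, the three separate sess.run calls in the print line each trigger their own graph execution. A minimal variant (a sketch against the same graph as above, not part of the original gist) fetches everything in one call:

# Variant of the loop above: one graph execution per step, so the
# fetched cost/W/b values and the train update come from the same run.
for step in range(2001):
    cost_val, W_val, b_val, _ = sess.run([cost, W, b, train])
    if step % 20 == 0:
        print(step, cost_val, W_val, b_val)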
Sample output from the loop above (step, cost, W, b, printed every 20 steps):
0 6.4561496 [-0.51385033] [0.8077184]
20 0.25518575 [0.38993922] [1.1360402]
40 0.18091412 [0.4978936] [1.1175276]
60 0.16384818 [0.52904] [1.0683291]
80 0.1488053 [0.5518922] [1.0184376]
100 0.13514732 [0.57302004] [0.97060496]
120 0.12274291 [0.5930931] [0.9249929]
140 0.11147713 [0.61221683] [0.881522]
160 0.10124532 [0.63044137] [0.8400937]
180 0.0919526 [0.6478092] [0.8006124]
200 0.083512835 [0.66436094] [0.7629866]
220 0.07584768 [0.6801347] [0.7271291]
240 0.06888608 [0.6951671] [0.6929567]
260 0.06256344 [0.70949316] [0.6603902]
280 0.05682114 [0.7231459] [0.6293544]
300 0.051605847 [0.73615706] [0.599777]
320 0.04686926 [0.74855673] [0.5715896]
340 0.042567406 [0.7603737] [0.54472697]
360 0.038660374 [0.7716353] [0.51912665]
380 0.035112005 [0.7823676] [0.49472973]
400 0.03188927 [0.79259545] [0.47147927]
420 0.028962335 [0.8023427] [0.44932148]
440 0.026304087 [0.81163186] [0.42820504]
460 0.023889774 [0.8204845] [0.408081]
480 0.021697097 [0.828921] [0.38890272]
500 0.019705644 [0.83696103] [0.37062567]
520 0.01789697 [0.8446233] [0.35320765]
540 0.016254311 [0.85192543] [0.33660817]
560 0.014762442 [0.8588844] [0.3207889]
580 0.0134074865 [0.86551625] [0.30571303]
600 0.0121769095 [0.87183654] [0.2913457]
620 0.011059246 [0.87785983] [0.27765346]
640 0.010044183 [0.8835999] [0.26460478]
660 0.009122293 [0.8890703] [0.25216934]
680 0.008285015 [0.8942836] [0.24031831]
700 0.007524586 [0.8992519] [0.22902422]
720 0.006833933 [0.9039867] [0.21826088]
740 0.006206686 [0.9084989] [0.20800342]
760 0.0056370157 [0.9127991] [0.19822802]
780 0.0051196334 [0.9168973] [0.188912]
800 0.0046497327 [0.9208029] [0.1800338]
820 0.0042229593 [0.9245248] [0.17157286]
840 0.0038353521 [0.9280718] [0.16350959]
860 0.0034833364 [0.93145216] [0.15582527]
880 0.0031636169 [0.9346736] [0.14850207]
900 0.0028732494 [0.9377437] [0.14152308]
920 0.0026095351 [0.9406696] [0.13487199]
940 0.002370024 [0.9434579] [0.12853348]
960 0.0021524923 [0.9461152] [0.12249289]
980 0.0019549269 [0.9486475] [0.11673617]
1000 0.0017754977 [0.95106095] [0.11125004]
1020 0.0016125349 [0.95336086] [0.10602168]
1040 0.0014645304 [0.95555276] [0.10103904]
1060 0.0013301085 [0.9576416] [0.09629059]
1080 0.0012080263 [0.9596324] [0.09176528]
1100 0.0010971477 [0.9615295] [0.08745262]
1120 0.0009964483 [0.9633374] [0.08334266]
1140 0.0009049876 [0.9650605] [0.07942586]
1160 0.0008219253 [0.96670246] [0.07569312]
1180 0.000746486 [0.9682673] [0.07213582]
1200 0.00067797 [0.9697586] [0.06874572]
1220 0.0006157444 [0.9711799] [0.06551491]
1240 0.00055922795 [0.9725343] [0.06243596]
1260 0.00050790067 [0.97382504] [0.05950172]
1280 0.00046128404 [0.9750552] [0.05670534]
1300 0.0004189443 [0.97622746] [0.05404042]
1320 0.00038049102 [0.97734475] [0.05150073]
1340 0.0003455714 [0.97840947] [0.04908037]
1360 0.00031384986 [0.9794241] [0.0467738]
1380 0.00028504297 [0.98039114] [0.04457559]
1400 0.0002588817 [0.9813127] [0.04248068]
1420 0.00023512129 [0.9821909] [0.04048423]
1440 0.00021354218 [0.9830279] [0.03858162]
1460 0.00019393938 [0.9838255] [0.03676842]
1480 0.00017614216 [0.98458564] [0.03504045]
1500 0.00015997294 [0.9853101] [0.03339368]
1520 0.0001452903 [0.9860004] [0.03182429]
1540 0.00013195425 [0.9866584] [0.03032865]
1560 0.000119844575 [0.9872853] [0.02890332]
1580 0.00010884446 [0.9878829] [0.02754498]
1600 9.885433e-05 [0.9884524] [0.02625046]
1620 8.9781846e-05 [0.988995] [0.02501681]
1640 8.154032e-05 [0.9895121] [0.02384117]
1660 7.405732e-05 [0.990005] [0.02272079]
1680 6.726027e-05 [0.9904749] [0.02165302]
1700 6.108659e-05 [0.9909225] [0.02063538]
1720 5.5480403e-05 [0.9913491] [0.01966557]
1740 5.0387e-05 [0.9917556] [0.01874136]
1760 4.5762557e-05 [0.9921431] [0.01786058]
1780 4.1561798e-05 [0.99251235] [0.01702119]
1800 3.77472e-05 [0.99286425] [0.01622124]
1820 3.428276e-05 [0.99319965] [0.0154589]
1840 3.113669e-05 [0.99351925] [0.01473236]
1860 2.8278317e-05 [0.99382377] [0.01403999]
1880 2.5682262e-05 [0.99411404] [0.01338018]
1900 2.332607e-05 [0.99439067] [0.01275136]
1920 2.1184465e-05 [0.99465424] [0.01215211]
1940 1.9240679e-05 [0.9949055] [0.01158101]
1960 1.7474229e-05 [0.99514496] [0.01103673]
1980 1.5870757e-05 [0.99537313] [0.01051803]
2000 1.4414135e-05 [0.99559057] [0.0100237]
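
The run converges as expected: since y_train equals x_train, the exact fit is W = 1, b = 0, and by step 2000 the cost is about 1.4e-05 with W ≈ 0.9956 and b ≈ 0.0100. To use the trained parameters on new inputs, a placeholder-based hypothesis can be fed values at run time; this is a minimal sketch (tf.placeholder and the X/pred names are additions, not part of the original gist), run in the same session after training:

# Sketch: prediction with the trained parameters (assumed addition).
X = tf.placeholder(tf.float32, shape=[None])  # new inputs, fed at run time
pred = X * W + b                              # reuses the trained W and b
print(sess.run(pred, feed_dict={X: [4.0, 5.0]}))  # ~ [4. 5.] after the run above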
