View pydl_funcs.py
import numpy as np
def step(x):
    """Heaviside step activation: 1 where x > 0, else 0.

    Parameters
    ----------
    x : array_like
        Input value(s); compared element-wise against zero.

    Returns
    -------
    numpy.ndarray
        0/1 integer array with the same shape as ``x``.
    """
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin ``int`` is the supported spelling of the same dtype.
    return np.array(x > 0, dtype=int)
def sigmoid(x):
    """Logistic sigmoid, 1 / (1 + e^(-x)), applied element-wise."""
    return 1 / (1 + np.exp(-x))
def relu(x):
View sketchup_exportFbx.rb
def exportFbx(filename)
if Sketchup.open_file(filename) then
model = Sketchup.active_model
dir = File.dirname(filename)
basename = File.basename(filename,'.*')
#model.save_thumbnail "#{dir}/#{basename}.png"
model.export "#{dir}/#{basename}.fbx", false
else
puts "Failed to Load fbx."
end
View pydl_costfuncs.py
import sys, os
import numpy as np
# Ground-truth answer table, one-hot labels
t = np.array([
[0,0,1,0,0,0,0,0,0,0], # 2
[0,0,0,1,0,0,0,0,0,0], # 3
[0,0,0,0,0,1,0,0,0,0], # 5
[1,0,0,0,0,0,0,0,0,0], # 0
[0,1,0,0,0,0,0,0,0,0] # 1
View pydl_neuralnet_mnist.py
import sys, os
import numpy as np
import pickle
import random
from PIL import Image
# https://github.com/oreilly-japan/deep-learning-from-scratch/common, dataset
sys.path.append(os.pardir)
from dataset.mnist import load_mnist
from common.functions import sigmoid, softmax
View maya_copyPivot.mel
// Copy the world-space scale and rotate pivots from $src onto $dest.
global proc TZ_Transform_CopyPivot(string $src, string $dest) {
    // Query both pivots of the source object in world space.
    float $sp[] = `xform -query -worldSpace -scalePivot $src`;
    float $rp[] = `xform -query -worldSpace -rotatePivot $src`;
    // Write the same world-space positions onto the destination.
    xform -worldSpace -scalePivot $sp[0] $sp[1] $sp[2] $dest;
    xform -worldSpace -rotatePivot $rp[0] $rp[1] $rp[2] $dest;
}
View maya_subdiv.mel
// 0: Smooth
// 1: Jelly
// 2: Round Rect 1
// 3: Round Rect 2
// 4: Bevel
// 5: Hard
global proc TZ_Subdiv_SetLevel(int $level)
{
int $minSubdivLevel = max(5, $level); // 최저 Subdiv레벨: 5
View pydl_gradient_descent.py
# Machine learning: numerical differentiation, partial derivatives, gradients, gradient descent
# Numerical differentiation via the forward difference
def numerical_diff_bad(f, x):
    """Forward-difference derivative of ``f`` at ``x`` with a too-small step.

    Deliberately flawed (hence the name): the step 10e-50 is so small that
    ``x + h`` rounds back to ``x`` in floating point, so the numerator
    collapses to zero.  Note np.float32(1e-50) => 0.0 — the rounding-error
    problem this example demonstrates.
    """
    step_size = 10e-50
    delta = f(x + step_size) - f(x)
    return delta / step_size
# 수치 미분, 중심 차분
def numerical_diff(f, x):
h = 1e-4 # 0.0001
View pydl_mnist_train.py
import sys, os
sys.path.append(os.pardir)
import numpy as np
from common.functions import *
from common.gradient import *
from dataset.mnist import load_mnist
class TwoLayerNet:
def __init__(self, inputSize, hiddenSize, outputSize, weight=0.01):
self.w1 = weight * np.random.randn(inputSize, hiddenSize)
View pydl_backpropagation_layer1.py
class AddLayer:
    """Computation-graph node for addition.

    Addition routes the upstream gradient to both operands unchanged
    (d(x + y)/dx = d(x + y)/dy = 1), so no forward state is cached.
    """

    def __init__(self):
        # Nothing to remember between forward and backward passes.
        pass

    def forward(self, x, y):
        """Return the sum of the two inputs."""
        return x + y

    def backward(self, dout):
        """Distribute the upstream gradient ``dout`` to both inputs."""
        return dout * 1, dout * 1
View pydl_backpropagation_layer2.py
import numpy as np
'''
class ReluLayer:
def __init__(self):
self.x = None
def forward(self, x):
self.x = x
if x > 0:
return x