1. 함수 정의
# Single-neuron logistic-regression demo trained with a perceptron-style
# update rule on one sample (x = 0, target y = 1).
import numpy as np


def sigmoid(x):
    """Logistic activation: squash x into the open interval (0, 1)."""
    return 1 / (1 + np.exp(-x))


# One training sample and initial parameters.
x, y = 0, 1
w, b = 0.9, 0.2

for i in range(1501):
    y_hat = sigmoid(x * w + b)   # forward pass
    error = y - y_hat            # prediction error
    # Update with learning rate 0.1.
    # NOTE(review): w is updated without multiplying by x, so with x == 0
    # the weight changes even though it never affects y_hat — confirm this
    # is intentional for the tutorial.
    w = w + 0.1 * error
    b = b + 0.1 * error
    if i % 100 == 0:
        print(i, error, y_hat)
1. 실습용 데이터 생성
import numpy as np

# Print floats without scientific notation, to 3 decimal places.
np.set_printoptions(suppress=True, precision=3)

# Input X (4 samples x 3 features); the constant third column acts as a bias.
X = np.array([[0, 0, 1],
              [0, 1, 1],
              [1, 0, 1],
              [1, 1, 1]])
X

# Target y (4 x 1) — the XOR of the first two input columns.
y = np.array([0., 1., 1., 0.]).reshape(4, 1)
y

# First-layer weights W1 (3 x 4), seeded for reproducibility.
np.random.seed(2045)
W1 = np.random.rand(3, 4)
W1

# Second-layer weights W2 (4 x 1).
np.random.seed(2046)
W2 = np.random.rand(4).reshape(4, 1)
W2

# Initial predictions y_hat (4 x 1); reuses seed 2045, so these repeat the
# first four draws of the same stream that produced W1.
np.random.seed(2045)
y_hat = np.random.rand(4).reshape(4, 1)
y_hat

# Hidden-layer activation placeholder Layer1 (4 x 4), filled with ones.
Layer1 = np.ones([4, 4])
Layer1
2. 함수 정의
#sigmoid( )
def sigmoid(x):
    """Logistic activation 1 / (1 + e^(-x)); applies element-wise to arrays."""
    return 1 / (1 + np.exp(-x))
#d_sigmoid( ) : sigmoid( ) 미분함수
def d_sigmoid(x):
    """Sigmoid derivative expressed in terms of the sigmoid OUTPUT.

    Callers pass already-activated values (y_hat, Layer1); for s = sigmoid(z)
    the derivative is sigmoid'(z) = s * (1 - s).
    """
    return x * (1.0 - x)
#Loss function
# Mean Squared Error
# def loss_function(y, y_hat):
# Loss = np.mean((y - y_hat) ** 2)
# return Loss
# Binary Cross Entropy Error
def loss_function(y, y_hat):
    """Binary cross-entropy loss, averaged over all elements.

    Assumes every y_hat lies strictly in (0, 1); log(0) would produce -inf.
    """
    per_element = y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat)
    return -np.mean(per_element)
3. 순방향과 역방향 함수 정의
3-1. Forward_Propagation
def forwardProp(X, W1, Layer1, W2, y_hat):
    """Forward pass of the 2-layer sigmoid network.

    The incoming Layer1 and y_hat arguments are ignored — both are
    recomputed from X, W1 and W2 and returned as fresh arrays.

    Returns (Layer1, y_hat): hidden activations and output predictions.
    """
    hidden = sigmoid(np.dot(X, W1))
    prediction = sigmoid(np.dot(hidden, W2))
    return hidden, prediction
3-2. Back_Propagation
def backProp(X, y, y_hat, Layer1, W1, W2, lr=0.8):
    """One backpropagation step for the 2-layer sigmoid network.

    The output delta is (y_hat - y) * d_sigmoid(y_hat), where d_sigmoid
    expects already-activated values.

    Parameters
    ----------
    X : input matrix (samples x features)
    y, y_hat : target and current prediction columns
    Layer1 : hidden activations from the forward pass
    W1, W2 : current first- and second-layer weights
    lr : learning rate; defaults to 0.8, the previously hard-coded value,
         so existing callers are unaffected.

    Returns
    -------
    (y_hat, Layer1, W1, W2) — y_hat and Layer1 are passed through
    unchanged; W1 and W2 are replaced with updated arrays.
    """
    # Output-layer delta and the gradient for W2.
    delta_out = (y_hat - y) * d_sigmoid(y_hat)
    d_W2 = np.dot(np.transpose(Layer1), delta_out)
    # Propagate the delta back through W2, then through the hidden sigmoid.
    delta_hidden = np.dot(delta_out, np.transpose(W2)) * d_sigmoid(Layer1)
    d_W1 = np.dot(np.transpose(X), delta_hidden)
    # Gradient-descent update (new arrays; inputs are not mutated).
    W1 = W1 - lr * d_W1
    W2 = W2 - lr * d_W2
    return y_hat, Layer1, W1, W2
4. 오차역전파를 적용한 Gradient Descent
# Running record of the loss value at every training iteration.
Loss_Record = []
4-1. Learning with Error Backpropagation
# Train for 1,000 iterations: forward pass, weight update, then log the loss.
for epoch in range(1000):
    Layer1, y_hat = forwardProp(X, W1, Layer1, W2, y_hat)
    y_hat, Layer1, W1, W2 = backProp(X, y, y_hat, Layer1, W1, W2)
    Loss_Record.append(loss_function(y, y_hat))
4-2. Parameter Update Check
# Notebook-style echoes: display the learned weights and final predictions.
W1
W2
# y_hat.round()
y_hat
4-3.Visualization
# Plot the loss curve recorded during training.
import matplotlib.pyplot as plt
plt.figure(figsize = (9, 6))
plt.plot(Loss_Record)
plt.show()
| Related post | Date |
|---|---|
| DNN (Deep Neural Network) - IMDB Binary Classification (0) | 2022.06.17 |
| TensorFlow (0) | 2022.06.13 |
| Artificial Neural Network (ANN) - Softmax Activation (1) | 2022.06.13 |
| Artificial Neural Network (ANN) - Neural Network Model (다중분류) (0) | 2022.06.13 |
| Artificial Neural Network (ANN) - MLP Gradient Descent (1) | 2022.06.13 |