Neural Network
Artificial Neural Network (ANN)
Modeled on the structure of the human brain (nerve cell: neuron)
Perceptron
A perceptron is the basic unit of a neural network: it takes a weighted sum of its inputs, adds a bias, and passes the result through an activation function, as sketched below.
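As a minimal sketch (the function and weight values here are my own illustration, not from the original code), a perceptron with a step activation can realize the AND gate:

import numpy as np

def perceptron(x, W, b):
    # Weighted sum plus bias, followed by a hard threshold (step activation)
    z = np.dot(x, W) + b
    return 1 if z > 0 else 0

# Hand-picked weights that realize AND
W = np.array([0.5, 0.5])
b = -0.7
for x in [[0, 0], [0, 1], [1, 0], [1, 1]]:
    print(x, '->', perceptron(np.array(x), W, b))   # 0, 0, 0, 1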
1. Numerical Derivative
Defining the gradient( ) function
import numpy as np

def gradient(machine, param):
    # Central-difference numerical gradient of machine() with respect to param.
    # NOTE: temp_param aliases param (no copy), so machine() sees each
    # perturbation in place; the original value is restored afterwards.
    if param.ndim == 1:
        temp_param = param
        delta = 0.00005
        learned_param = np.zeros(param.shape)

        for index in range(len(param)):
            target_param = float(temp_param[index])
            temp_param[index] = target_param + delta
            param_plus_delta = machine(temp_param)
            temp_param[index] = target_param - delta
            param_minus_delta = machine(temp_param)
            # Central difference: (f(x + delta) - f(x - delta)) / (2 * delta)
            learned_param[index] = (param_plus_delta - param_minus_delta) / (2 * delta)
            temp_param[index] = target_param   # restore original value

        return learned_param

    elif param.ndim == 2:
        temp_param = param
        delta = 0.00005
        learned_param = np.zeros(param.shape)
        rows = param.shape[0]
        columns = param.shape[1]

        for row in range(rows):
            for column in range(columns):
                target_param = float(temp_param[row, column])
                temp_param[row, column] = target_param + delta
                param_plus_delta = machine(temp_param)
                temp_param[row, column] = target_param - delta
                param_minus_delta = machine(temp_param)
                learned_param[row, column] = (param_plus_delta - param_minus_delta) / (2 * delta)
                temp_param[row, column] = target_param   # restore original value

        return learned_param
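As a quick sanity check of gradient( ) (this test function is my own, not part of the original post), the gradient of f(w) = w0^2 + w1^2 at (1, 2) should come out as approximately (2, 4):

f = lambda w: w[0]**2 + w[1]**2      # simple convex test function
w = np.array([1.0, 2.0])             # float dtype, since gradient( ) perturbs it in place
print(gradient(f, w))                # approx. [2. 4.]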
2. Logic Gates: 'AND', 'OR', 'NAND'
2-1. Defining the sigmoid( ) function
import numpy as np

def sigmoid(x):
    # Logistic function: maps any real number into (0, 1)
    y_hat = 1 / (1 + np.exp(-x))
    return y_hat
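A quick check (again, my own example, not from the original post): sigmoid(0) is exactly 0.5, and the output saturates toward 0 and 1 for large negative and positive inputs:

print(sigmoid(0))                         # 0.5
print(sigmoid(np.array([-10, 0, 10])))    # approx. [4.54e-05  0.5  0.99995]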
2-2. Declaring the LogicGate class
class LogicGate:

    def __init__(self, gate_Type, X_input, y_output):
        # Gate type string member (e.g. 'AND_GATE')
        self.Type = gate_Type

        # Initialize X_input, y_output members
        self.X_input = X_input.reshape(4, 2)
        self.y_output = y_output.reshape(4, 1)

        # Initialize W, b members with random values
        self.W = np.random.rand(2, 1)
        self.b = np.random.rand(1)

        # Set the learning_rate member
        self.learning_rate = 0.01

    # Cost function: cross-entropy error (CEE)
    # CEE = -sum[ y*log(y_hat) + (1 - y)*log(1 - y_hat) ]
    def cost_func(self):
        z = np.dot(self.X_input, self.W) + self.b
        y_hat = sigmoid(z)
        delta = 0.00001   # keeps log() away from log(0)
        return -np.sum(self.y_output * np.log(y_hat + delta)
                       + (1 - self.y_output) * np.log((1 - y_hat) + delta))

    # Learning method: gradient descent with numerical gradients
    def fit(self):
        machine = lambda x: self.cost_func()
        print('Initial Cost = ', self.cost_func())
        for step in range(10001):
            self.W = self.W - self.learning_rate * gradient(machine, self.W)
            self.b = self.b - self.learning_rate * gradient(machine, self.b)
            if (step % 1000 == 0):
                print('Step = ', step, 'Cost = ', self.cost_func())

    # Predict method: returns probability and thresholded class label
    def predict(self, input_data):
        z = np.dot(input_data, self.W) + self.b
        y_prob = sigmoid(z)
        if y_prob > 0.5:
            result = 1
        else:
            result = 0
        return y_prob, result
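One subtlety in fit( ): the machine lambda ignores its argument and simply re-evaluates cost_func( ). This still works because gradient( ) assigns temp_param = param without copying, so temp_param aliases self.W (or self.b), and every in-place perturbation is immediately visible to cost_func( ) through self.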
2-3. AND_Gate
X_input = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_output = np.array([0, 0, 0, 1])

AND_Gate = LogicGate('AND_GATE', X_input, y_output)
AND_Gate.fit()

print(AND_Gate.Type, '\n')
test_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
for input_data in test_data:
    (sigmoid_val, logical_val) = AND_Gate.predict(input_data)
    print(input_data, ' = ', logical_val)
AND_GATE
[0 0] = 0
[0 1] = 0
[1 0] = 0
[1 1] = 1
2-4. OR_Gate
X_input = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_output = np.array([0, 1, 1, 1])

OR_Gate = LogicGate('OR_GATE', X_input, y_output)
OR_Gate.fit()

print(OR_Gate.Type, '\n')
test_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
for input_data in test_data:
    (sigmoid_val, logical_val) = OR_Gate.predict(input_data)
    print(input_data, ' = ', logical_val)
OR_GATE
[0 0] = 0
[0 1] = 1
[1 0] = 1
[1 1] = 1
2-5. NAND_Gate
X_input = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_output = np.array([1, 1, 1, 0])

NAND_Gate = LogicGate('NAND_GATE', X_input, y_output)
NAND_Gate.fit()

print(NAND_Gate.Type, '\n')
test_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
for input_data in test_data:
    (sigmoid_val, logical_val) = NAND_Gate.predict(input_data)
    print(input_data, ' = ', logical_val)
NAND_GATE
[0 0] = 1
[0 1] = 1
[1 0] = 1
[1 1] = 0
3. XOR_Gate Issue
3-1. XOR_Gate Failure
XOR is not linearly separable: no single line in the input plane separates {(0, 1), (1, 0)} from {(0, 0), (1, 1)}, so a single-layer perceptron like LogicGate cannot learn it. The training below therefore fails.
X_input = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_output = np.array([0, 1, 1, 0])

XOR_Gate = LogicGate('XOR_GATE', X_input, y_output)
XOR_Gate.fit()

print(XOR_Gate.Type, '\n')
test_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
for input_data in test_data:
    (sigmoid_val, logical_val) = XOR_Gate.predict(input_data)
    print(input_data, ' = ', logical_val)
XOR_GATE
[0 0] = 0
[0 1] = 0
[1 0] = 0
[1 1] = 1
3-2. XOR_Gate Success
XOR can be composed from the gates that did train successfully: XOR(a, b) = AND(NAND(a, b), OR(a, b)). The code below feeds each input through the trained NAND and OR gates and passes their outputs to the trained AND gate.
input_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])

HL1_1 = []            # NAND output
HL1_2 = []            # OR output
new_input_data = []   # AND input
final_output = []     # AND (= XOR) output

for index in range(len(input_data)):
    HL1_1 = NAND_Gate.predict(input_data[index])   # NAND output
    HL1_2 = OR_Gate.predict(input_data[index])     # OR output

    new_input_data.append(HL1_1[-1])   # AND input (NAND's class label)
    new_input_data.append(HL1_2[-1])   # AND input (OR's class label)

    (sigmoid_val, logical_val) = AND_Gate.predict(np.array(new_input_data))
    final_output.append(logical_val)   # AND (= XOR) output
    new_input_data = []                # reset AND input
print(XOR_Gate.Type, '\n')
for index in range(len(input_data)):
    print(input_data[index], ' = ', final_output[index])
XOR_GATE
[0 0] = 0
[0 1] = 1
[1 0] = 1
[1 1] = 0
3-3. XOR_Gate Learning
Rather than wiring pre-trained gates together, a network with one hidden layer can learn XOR directly from the data. The XOR_Gate class below inserts a two-unit hidden layer (W_1, b_1) in front of the output layer (W_2, b_2).
# XOR_Gate Class
class XOR_Gate:

    def __init__(self, gate_Type, X_input, y_output):
        # Gate type string member
        self.Type = gate_Type

        # Initialize X_input, y_output members
        self.X_input = X_input.reshape(4, 2)
        self.y_output = y_output.reshape(4, 1)

        # Initialize hidden-layer parameters W_1, b_1
        self.W_1 = np.random.rand(2, 2)
        self.b_1 = np.random.rand(2)

        # Initialize output-layer parameters W_2, b_2
        self.W_2 = np.random.rand(2, 1)
        self.b_2 = np.random.rand(1)

        # Set the learning_rate member
        self.learning_rate = 0.01

    # Cost function: cross-entropy error (CEE)
    def cost_func(self):
        z_1 = np.dot(self.X_input, self.W_1) + self.b_1   # hidden layer
        a_1 = sigmoid(z_1)
        z_2 = np.dot(a_1, self.W_2) + self.b_2            # output layer
        y_hat = sigmoid(z_2)
        delta = 0.00001   # keeps log() away from log(0)
        return -np.sum(self.y_output * np.log(y_hat + delta)
                       + (1 - self.y_output) * np.log((1 - y_hat) + delta))

    # Learning method: gradient descent over both layers
    def fit(self):
        machine = lambda x: self.cost_func()
        print('Initial Cost = ', self.cost_func())
        for step in range(50001):
            self.W_1 = self.W_1 - self.learning_rate * gradient(machine, self.W_1)
            self.b_1 = self.b_1 - self.learning_rate * gradient(machine, self.b_1)
            self.W_2 = self.W_2 - self.learning_rate * gradient(machine, self.W_2)
            self.b_2 = self.b_2 - self.learning_rate * gradient(machine, self.b_2)
            if (step % 1000 == 0):
                print('Step = ', step, 'Cost = ', self.cost_func())

    # Predict method: returns probability and thresholded class label
    def predict(self, input_data):
        z_1 = np.dot(input_data, self.W_1) + self.b_1   # hidden layer
        a_1 = sigmoid(z_1)
        z_2 = np.dot(a_1, self.W_2) + self.b_2          # output layer
        y_prob = sigmoid(z_2)
        if y_prob > 0.5:
            result = 1
        else:
            result = 0
        return y_prob, result
X_input = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_output = np.array([0, 1, 1, 0])

XOR_Gate_2 = XOR_Gate('XOR_GATE', X_input, y_output)
XOR_Gate_2.fit()

print(XOR_Gate_2.Type, '\n')
test_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
for input_data in test_data:
    (sigmoid_val, logical_val) = XOR_Gate_2.predict(input_data)
    print(input_data, ' = ', logical_val)
XOR_GATE
[0 0] = 0
[0 1] = 1
[1 0] = 1
[1 1] = 0