import numpy as np

# Seed the global RNG so the random weight/bias initialization below is
# reproducible from run to run.
np.random.seed(10)

class ReLU:
    """Rectified linear unit: f(x) = max(0, x), applied element-wise."""

    def __call__(self, x):
        return np.maximum(0, x)

    def derivative(self, x):
        # Piecewise gradient: 1 where x > 0, else 0 (the subgradient at
        # x == 0 is taken to be 0).
        return (x > 0).astype(float)

class Sigmoid:
    """Logistic sigmoid: f(x) = 1 / (1 + exp(-x)), mapping into (0, 1)."""

    def __call__(self, x):
        return 1 / (1 + np.exp(-x))

    def derivative(self, x):
        # Reuse the forward value: sigma'(x) = sigma(x) * (1 - sigma(x)).
        sig = self.__call__(x)
        return sig * (1 - sig)

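# --- Illustrative addition, not part of the original file ----------------
# A quick finite-difference sanity check of the two derivative() methods.
# The test points deliberately avoid x == 0, where ReLU's subgradient
# convention and a central difference disagree.
def _check_derivative(activation, x, eps=1e-6):
    numeric = (activation(x + eps) - activation(x - eps)) / (2.0 * eps)
    return np.allclose(activation.derivative(x), numeric, atol=1e-4)

assert _check_derivative(ReLU(), np.array([-1.5, -0.1, 0.3, 2.0]))
assert _check_derivative(Sigmoid(), np.array([-1.5, -0.1, 0.3, 2.0]))
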
class Layer:
    """A fully connected layer: output = activation(W x + b)."""

    def __init__(self, n_inputs, n_neurons, activation_function):
        # Column-vector buffers holding the most recent forward pass.
        self.inputs = np.zeros((n_inputs, 1))
        self.outputs = np.zeros((n_neurons, 1))
        # Weights (n_neurons x n_inputs) and biases (n_neurons x 1),
        # initialized uniformly at random in [-1, 1).
        self.weights = np.random.uniform(-1, 1, (n_neurons, n_inputs))
        self.biases = np.random.uniform(-1, 1, (n_neurons, 1))
        self.activation = activation_function

    def forward(self, inputs):
        # Coerce any array-like input into a column vector.
        self.inputs = np.array(inputs).reshape(-1, 1)
        # Cache the pre-activation W x + b, then apply the activation.
        self.outputs = np.dot(self.weights, self.inputs) + self.biases
        return self.activation(self.outputs)

relu = ReLU()
sigmoid = Sigmoid()

# Architecture: 2 inputs -> two hidden layers of 3 neurons -> 1 output.
input_size = 2
hidden_size = 3
output_size = 1

hidden_1 = Layer(input_size, hidden_size, relu)
hidden_2 = Layer(hidden_size, hidden_size, relu)
output_layer = Layer(hidden_size, output_size, sigmoid)
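
# --- Illustrative addition, not part of the original file ----------------
# One forward pass through the network; the 2-feature input is made up.
x = [0.5, -0.2]                        # hypothetical input sample
h1 = hidden_1.forward(x)               # (3, 1) hidden activations
h2 = hidden_2.forward(h1)              # (3, 1) hidden activations
prediction = output_layer.forward(h2)  # (1, 1) sigmoid output in (0, 1)
print("prediction:", prediction.item())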