"""python/activations.py — NumPy activation functions (ReLU, Sigmoid)."""
import numpy as np
class ReLU:
    """Rectified linear unit activation: relu(x) = max(0, x), element-wise."""

    def __call__(self, x):
        """Apply ReLU element-wise; accepts Python scalars or ndarrays."""
        return np.maximum(0, x)

    def derivative(self, x):
        """Element-wise gradient: 1.0 where x > 0, else 0.0.

        Uses the subgradient 0 at x == 0 (same convention as the original).
        Bug fix: `(x > 0).astype(float)` raised AttributeError for plain
        Python scalars because `bool` has no `.astype`; `np.greater`
        coerces its argument to an ndarray first, so scalars now work,
        matching the scalar support of `__call__`.
        """
        return np.greater(x, 0).astype(float)
class Sigmoid:
    """Logistic sigmoid activation: sigmoid(x) = 1 / (1 + exp(-x))."""

    def __call__(self, x):
        """Apply the sigmoid element-wise, overflow-safely.

        Bug fix: the naive `1 / (1 + np.exp(-x))` overflows (RuntimeWarning,
        exp -> inf) for large-magnitude negative x. Evaluating exp only at
        -|x| (which can underflow to 0 but never overflow) and branching on
        the sign gives mathematically identical results without warnings:
          x >= 0:  1 / (1 + exp(-x))
          x <  0:  exp(x) / (1 + exp(x))
        """
        e = np.exp(-np.abs(x))
        return np.where(x >= 0, 1.0 / (1.0 + e), e / (1.0 + e))

    def derivative(self, x):
        """Element-wise gradient: sigmoid(x) * (1 - sigmoid(x))."""
        s = self.__call__(x)
        return s * (1.0 - s)
# Shared, ready-to-use module-level instances. Both activation classes are
# stateless, so a single object per activation is safe to reuse everywhere.
relu, sigmoid = ReLU(), Sigmoid()