Skip to content

Latest commit

 

History

History
168 lines (156 loc) · 6.05 KB

Documentation.md

File metadata and controls

168 lines (156 loc) · 6.05 KB
Menu
  1. Activation Functions (CPU)
  2. Loss Functions (CPU)
  3. Neural Network Functions (CPU)

Activation Functions (CPU)

List of available activation functions and their gradients in activation.py, as well as how to use them.

Sigmoid

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.Sigmoid(inputs)
gradient_wrt_inputs = activation.Sigmoid_grad(inputs)
print('Output\n',output)
print('Gradient\n',gradient_wrt_inputs)

Tanh

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.Tanh(inputs)
gradient_wrt_inputs = activation.Tanh_grad(inputs)
print('Output\n',output)
print('Gradient\n',gradient_wrt_inputs)

Tanh_offset

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.Tanh_offset(inputs)
gradient_wrt_inputs = activation.Tanh_offset_grad(inputs)
print('Output\n',output)
print('Gradient\n',gradient_wrt_inputs)

Identity

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.Identity(inputs)
gradient_wrt_inputs = activation.Identity_grad(inputs)
print('Output\n',output)
print('Gradient\n',gradient_wrt_inputs)

ReLU

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.ReLU(inputs)
gradient_wrt_inputs = activation.ReLU_grad(inputs)

Softplus

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.Softplus(inputs)
gradient_wrt_inputs = activation.Softplus_grad(inputs)
print('Output\n',output)
print('Gradient\n',gradient_wrt_inputs)

Softmax

import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.Softmax(inputs)
jacobian_wrt_inputs = activation.Softmax_grad(inputs) 
print('Output\n',output)
print('Gradient\n',jacobian_wrt_inputs)

(back to top)

Loss Functions (CPU)

List of available loss functions and their gradients in loss.py, as well as how to use them.

Mean Absolute Error (MAE_loss)

from crysx_nn.loss import MAE_loss, MAE_loss_grad
import numpy as np
# predictions and targets should be 2d arrays where the rows correspond to the samples and the columns correspond to the output nodes.
predictions = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
targets = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
error = MAE_loss(predictions, targets)
gradient_wrt_predictions = MAE_loss_grad(predictions, targets)

Mean Squared Error (MSE_loss)

from crysx_nn.loss import MSE_loss, MSE_loss_grad
import numpy as np
# predictions and targets should be 2d arrays where the rows correspond to the samples and the columns correspond to the output nodes.
predictions = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
targets = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
error = MSE_loss(predictions, targets)
gradient_wrt_predictions = MSE_loss_grad(predictions, targets)

Binary Cross Entropy (BCE_loss)

from crysx_nn.loss import BCE_loss, BCE_loss_grad
import numpy as np
# predictions and targets should be 2d arrays where the rows correspond to the samples and the columns correspond to the output nodes.
predictions = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
targets = np.random.randint(0, 2, (5,4)) # BatchSize=nSamples=5; nodes=4; binary (0/1) targets for BCE
error = BCE_loss(predictions, targets)
gradient_wrt_predictions = BCE_loss_grad(predictions, targets)

Categorical Cross Entropy (CCE_loss)

(back to top)

Neural Network Functions (CPU)

List of available neural network functions and their gradients in network.py, as well as how to use them.

Visualize

Optimize

Forward Feed

Backpropagation

(back to top)