Menu
List of available activation functions and their gradients in activation.py, as well as how to use them.
import numpy as np
import crysx_nn.activation as activation
# Activation functions expect a 2d array: rows are samples, columns are nodes.
x = np.random.random((5, 4))  # batch of 5 samples, 4 nodes each
output = activation.Sigmoid(x)
gradient_wrt_inputs = activation.Sigmoid_grad(x)
print('Output\n', output)
print('Gradient\n', gradient_wrt_inputs)
import numpy as np
import crysx_nn.activation as activation
# Activation functions expect a 2d array: rows are samples, columns are nodes.
x = np.random.random((5, 4))  # batch of 5 samples, 4 nodes each
output = activation.Tanh(x)
gradient_wrt_inputs = activation.Tanh_grad(x)
print('Output\n', output)
print('Gradient\n', gradient_wrt_inputs)
import numpy as np
import crysx_nn.activation as activation
# Activation functions expect a 2d array: rows are samples, columns are nodes.
x = np.random.random((5, 4))  # batch of 5 samples, 4 nodes each
output = activation.Tanh_offset(x)
gradient_wrt_inputs = activation.Tanh_offset_grad(x)
print('Output\n', output)
print('Gradient\n', gradient_wrt_inputs)
import numpy as np
import crysx_nn.activation as activation
# Activation functions expect a 2d array: rows are samples, columns are nodes.
x = np.random.random((5, 4))  # batch of 5 samples, 4 nodes each
output = activation.Identity(x)
gradient_wrt_inputs = activation.Identity_grad(x)
print('Output\n', output)
print('Gradient\n', gradient_wrt_inputs)
import crysx_nn.activation as activation
import numpy as np
# inputs should be a 2d array where the rows correspond to the samples and the columns correspond to the nodes.
inputs = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
output = activation.ReLU(inputs)
gradient_wrt_inputs = activation.ReLU_grad(inputs)
# Print the results, matching every other activation example in this document
# (these two lines were missing from the original ReLU snippet).
print('Output\n',output)
print('Gradient\n',gradient_wrt_inputs)
import numpy as np
import crysx_nn.activation as activation
# Activation functions expect a 2d array: rows are samples, columns are nodes.
x = np.random.random((5, 4))  # batch of 5 samples, 4 nodes each
output = activation.Softplus(x)
gradient_wrt_inputs = activation.Softplus_grad(x)
print('Output\n', output)
print('Gradient\n', gradient_wrt_inputs)
import numpy as np
import crysx_nn.activation as activation
# Softmax also takes a 2d array: rows are samples, columns are nodes.
x = np.random.random((5, 4))  # batch of 5 samples, 4 nodes each
output = activation.Softmax(x)
# Softmax couples all outputs of a sample, so the gradient is a full Jacobian.
jacobian_wrt_inputs = activation.Softmax_grad(x)
print('Output\n', output)
print('Gradient\n', jacobian_wrt_inputs)
List of available loss functions and their gradients in loss.py, as well as how to use them.
import numpy as np
from crysx_nn.loss import MAE_loss, MAE_loss_grad
# Loss functions take 2d arrays: rows are samples, columns are output nodes.
predictions = np.random.random((5, 4))  # batch of 5 samples, 4 output nodes
targets = np.random.random((5, 4))      # same shape as predictions
error = MAE_loss(predictions, targets)
gradient_wrt_predictions = MAE_loss_grad(predictions, targets)
from crysx_nn.loss import MSE_loss, MSE_loss_grad
import numpy as np
# predictions and targets should be 2d arrays where the rows correspond to the samples and the columns correspond to the output nodes.
predictions = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
targets = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
error = MSE_loss(predictions, targets)
# Call the directly-imported function; the original `loss.MSE_loss_grad(...)`
# raised NameError because no `loss` module object was ever imported.
gradient_wrt_predictions = MSE_loss_grad(predictions, targets)
# Import the BCE functions actually used below; the original snippet imported
# the MSE functions by mistake and referenced an undefined `loss` module.
from crysx_nn.loss import BCE_loss, BCE_loss_grad
import numpy as np
# predictions and targets should be 2d arrays where the rows correspond to the samples and the columns correspond to the output nodes.
predictions = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
# Use uniform samples in [0, 1): binary cross-entropy expects targets in [0, 1].
# (The original `np.random.randn((5,4))` is also a TypeError — randn takes
# separate dimension arguments, not a shape tuple.)
targets = np.random.random((5,4)) # BatchSize=nSamples=5; nodes=4
error = BCE_loss(predictions, targets)
gradient_wrt_predictions = BCE_loss_grad(predictions, targets)
List of available neural network functions and their gradients in network.py, as well as how to use them.