# -*- coding: utf-8 -*-
"""
Created on Sun Apr 29 13:21:50 2018
@author: Esmaeil Seraj <[email protected]>
@website: https://github.com/EsiSeraj/

Demo script for "Plain_DNN.py", a plain deep neural network model generator
(a plain implementation of deep neural networks, without any improvement
techniques) together with all of its required helper functions.

Copyright (C) <2018> <Esmaeil Seraj>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
# =============================================================================
# "Plain_DNN.py" test: plain Deep Neural Network (Without any improvement tech)
# This module includes all of the helper functions for plain deep Neural Net
# included functions: sigmoid(), relu(), tanh(), sigmoid_backward(),
# relu_backward(), tanh_backward(), initialize_parameters_deep(),
# linear_forward(), activation_forward(), forward_propagation_deep(),
# compute_cost(), linear_backward(), activation_backward(), predict_deep(),
# backward_propagation_deep(), update_parameters_deep(), plain_nn_model_deep(),
# =============================================================================
import Plain_DNN as pDNN
## initialize parameters
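# layer sizes: 10 input features, two hidden layers with 4 and 6 units, and
# one output unit (binary classification)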
layers_dims = [10, 4, 6, 1]
parameters = pDNN.initialize_parameters_deep(layers_dims)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
print("W3 = " + str(parameters["W3"]))
print("b3 = " + str(parameters["b3"]))
## forward propagation
np.random.seed(1)
X_train = np.random.randn(layers_dims[0], 500)
Y_train = np.random.randint(2, size = (1, X_train.shape[1]))
X_test = np.random.randn(layers_dims[0], 200)
Y_test = np.random.randint(2, size = (1, X_test.shape[1]))
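# NOTE: features and labels here are purely random, so there is no structure
# to learn; any accuracy printed later should hover around chance (~50%)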
activation = 'relu'
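# the forward pass returns the network output y_hat plus one cache per layer
# for back-propagation; 'relu' presumably drives the hidden layers, with a
# sigmoid at the output producing binary-class probabilities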
y_hat, caches = pDNN.forward_propagation_deep(X_train, parameters, activation)
print("y_hat = " + str(y_hat))
print("Length of caches list = " + str(len(caches)))
## compute the cost function
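# assuming compute_cost() implements the standard binary cross-entropy:
# J = -(1/m) * sum(Y * log(y_hat) + (1 - Y) * log(1 - y_hat))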
c = pDNN.compute_cost(y_hat, Y_train)
print("cost = " + str(c))
## backward propagation
gradients = pDNN.backward_propagation_deep(y_hat, Y_train, caches, activation)
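# the gradients dictionary presumably holds one entry per parameter ("dW1",
# "db1", ..., "dW3", "db3"), mirroring the keys of the parameters dictionary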
print("\n check the gradients dictionary in Variable Explorer window..!!\n")
## update parameters
learning_rate = 0.001
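# update_parameters_deep() presumably applies one plain gradient-descent step:
# W[l] := W[l] - learning_rate * dW[l], b[l] := b[l] - learning_rate * db[l]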
params1 = pDNN.update_parameters_deep(parameters, gradients, learning_rate)
print("\n check the updated parameters dictionary in Variable Explorer window..!!\n")
## plain deep neural network model generator
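# the model generator presumably chains the steps demonstrated above
# (initialize, forward, cost, backward, update) into one training loop and
# returns the trained parameters together with the recorded costs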
params2, costs = pDNN.plain_nn_model_deep(X_train, Y_train, layers_dims,
                                          activation, learning_rate,
                                          num_iterations = 3000,
                                          print_cost = True,
                                          plot_lrn_curve = True)
## prediction using the output of a trained deep model
preds = pDNN.predict_deep(X_train, Y_train, params2, activation, print_accuracy = True)
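# sketch: X_test/Y_test were generated above but never used; evaluating the
# trained parameters on this held-out set (same call signature as above)
# should likewise land near chance on these random labels
preds_test = pDNN.predict_deep(X_test, Y_test, params2, activation, print_accuracy = True)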
###############################################################################
###############################################################################