Simple Neural Network Python Code
File: /home/andres/Dropbox/Classes/…es/Data/SimpleNeuralNetwork.py
import matplotlib.pyplot as plt
import numpy as np
from scipy.special import expit
def sigmoid(x):
    return expit(x)
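# Optional sketch: the backpropagation steps below rely on the identity
# sigmoid'(x) = sigmoid(x)*(1 - sigmoid(x)). A quick finite-difference check
# at an arbitrary point x0 = 0.3 (illustration values only) confirms it.
x0, h = 0.3, 1e-6
numeric = (sigmoid(x0 + h) - sigmoid(x0 - h)) / (2.0*h)
analytic = sigmoid(x0)*(1.0 - sigmoid(x0))
print("Derivative identity check:", numeric, analytic)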
def Neural_Training(Y01, Labels01, eta, Epochs):
    d, samplenumb = Y01.shape
    # Random [-1,1] initialization from Haykin
    WIH = 2*np.mat(np.random.rand(2*d, d)) - 1.0   # input -> hidden weights
    WHO = 2*np.mat(np.random.rand(1, 2*d)) - 1.0   # hidden -> output weights
    difft = Labels01.astype(np.float64)
    for i in range(Epochs):
        # Forward pass: hidden activations and output
        y_j_temp = sigmoid(WIH*Y01)
        netk = WHO*y_j_temp
        zk = sigmoid(netk)
        # Delta for the hidden -> output weights
        diff1 = difft - zk
        tDeltaWk = eta*np.multiply(diff1, np.multiply(sigmoid(netk), 1.0 - sigmoid(netk)))
        tDeltaWk = np.tile(tDeltaWk, (2*d, 1))
        DeltaWk = np.multiply(y_j_temp, tDeltaWk)
        DeltaWk = np.transpose(np.sum(DeltaWk, axis=1))
        # Update the hidden -> output weights
        WHO = WHO + DeltaWk
        # Delta for the input -> hidden weights (backpropagated error)
        dnetj = np.multiply(y_j_temp, 1.0 - y_j_temp)
        tprodsumk = np.multiply(np.transpose(DeltaWk), np.transpose(WHO))
        tprodsumk = np.tile(tprodsumk, (1, samplenumb))
        tprodsumk = eta*np.multiply(tprodsumk, dnetj)
        DeltaWj = tprodsumk * np.transpose(Y01)
        # Update the input -> hidden weights
        WIH = WIH + DeltaWj
    return WIH, WHO
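# Optional smoke test of Neural_Training on small random data; the sizes and
# learning rate below (5 features, 40 samples, 3 epochs, eta = 0.01) are
# arbitrary illustration values, not the MNIST settings used further down.
_Ytest = np.mat(np.random.rand(5, 40))
_Ltest = (np.random.rand(40) > 0.5).astype(np.uint8)
_WIHt, _WHOt = Neural_Training(_Ytest, _Ltest, 0.01, 3)
print("Smoke test weight shapes:", _WIHt.shape, _WHOt.shape)  # expect (10, 5) and (1, 10)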
# Number of samples
N = 60000
# Number of Epochs
Epochs = 20
# Learning Rate
eta = 0.001
# Open the MNIST images for [r]eading as [b]inary and skip the 16-byte header
in_file = open("train-images.idx3-ubyte", "rb")
in_file.read(16)
Data = in_file.read()
in_file.close()
# Transform the byte stream into a float matrix
X = np.frombuffer(Data, dtype=np.uint8)
X = X.astype(np.float64)
X = np.mat(X)
X = X.reshape(N, 784)
X = np.transpose(X)   # columns are samples: X is 784 x N
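# Optional visual check using the matplotlib import above: display the first
# training image (column 0 of X, reshaped back to its 28x28 pixel grid).
plt.imshow(np.asarray(X[:, 0]).reshape(28, 28), cmap="gray")
plt.title("First MNIST training image")
plt.show()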
# Now the labels:
in_file = open("train-labels.idx1-ubyte", "rb")
in_file.read(8)
DLabel = in_file.read()
in_file.close()
# Transform the label byte stream
Labels = np.frombuffer(DLabel, dtype=np.uint8)
# Keep only the digits 0 and 1
mask01 = np.logical_or(Labels == 0, Labels == 1)
Labels01 = Labels[mask01]
# Extract the corresponding columns of the data
X01 = X[:, mask01]
dummy, N1 = X01.shape
# Mean of the 0/1 samples
Xmean = X01.sum(axis=1)
Xmean = (1.0/N1)*Xmean
# Center the data to zero mean
X01 = X01 - Xmean
# Sample covariance matrix
C_X = (1.0/(N1 - 1.0))*(X01*np.transpose(X01))
# Eigendecomposition, with eigenvalues/eigenvectors sorted in decreasing order
D, E = np.linalg.eigh(C_X)
idx = D.argsort()[::-1]
D = D[idx]
E = E[:, idx]
# Project onto the first 100 principal components
P = np.transpose(E[:, 0:100])
Y01 = P*X01
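# Optional check: fraction of the total variance retained by the 100
# principal components kept above (the eigenvalues D are sorted in decreasing order).
print("Variance retained by 100 components:", np.sum(D[0:100]) / np.sum(D))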
# Run the Training
WIH, WHO = Neural_Training(Y01, Labels01 , eta, Epochs)
# Network outputs (forward pass) on the training data
Results = sigmoid(WHO*sigmoid(WIH*Y01))
# Find the decisions
d,samplenumb = Y01.shape
index = np.ones(samplenumb)
Results = np.asarray(Results).reshape(-1)
# Generate the confusion matrix using a hard threshold: half the gap between
# the largest class-0 response and the largest class-1 response
t1 = np.max(Results[Labels01 == 0])
t2 = np.max(Results[Labels01 == 1])
tr = (t2 - t1)/2.0
# Print the threshold
print(tr)
R11 = np.sum(index[ np.logical_and(Results[:]<tr, Labels01[:]==0 )])
R22 = np.sum(index[ np.logical_and(Results[:]>tr, Labels01[:]==1 )])
R12 = np.sum(index[ np.logical_and(Results[:]>=tr, Labels01[:]==0 ) ])
R21 = np.sum(index[ np.logical_and(Results[:]<=tr, Labels01[:]==1 ) ])
ConfusionMatrix = np.matrix([[R11 ,R12] , [R21 , R22]])
# Print the results
print("Confusion Matrix\n", ConfusionMatrix)
# Print some Labels
for i in range(0, 15):
    print("Gen Label {} = {} Real Label {}".format(i, round(Results[i], 2), Labels01[i]))
