
Question

Need help completing this code.

class neural_network: 
    def __init__(self,Nhidden,Ninput,Noutput,Nonlinearities):
        
        
        self.layer = []
        
        # BUILD THE NETWORK
        
        # Add a linear layer with Ninput input features and Nhidden output features
        # YOUR CODE HERE 

        # Add a non-linearity
        if(Nonlinearities[0]=='relu'):
            self.layer.append(relu())
        else:
            self.layer.append(sigmoid())
        
        # Add a linear layer
        # YOUR CODE HERE 
        
        # Add a non-linearity
        
        
    def forward(self,X): 
        
        # Forward propagation
        # YOUR CODE HERE 
        return X
    
    def backward(self,dY):
        # Backward propagation
        # YOUR CODE HERE 
        
        return 

    def update(self, learning_rate):
        # Updating the parameters of the network according to algorithm 
        
        # YOUR CODE HERE
        
        self.layer[0].W = 
        self.layer[0].b = 
        self.layer[2].W = 
        self.layer[2].b = 

 

Expert Solution
Step 1: Here's the completed code with comments explaining each step:

CODE in Python:

class neural_network: 
    def __init__(self, Nhidden, Ninput, Noutput, Nonlinearities):
        
        self.layer = []
        
        # BUILD THE NETWORK
        
        # Add a linear layer with Ninput input features and Nhidden output features
        self.layer.append(linear(Ninput, Nhidden))

        # Add a non-linearity
        if Nonlinearities[0] == 'relu':
            self.layer.append(relu())
        else:
            self.layer.append(sigmoid())
        
        # Add a linear layer with Nhidden input features and Noutput output features
        self.layer.append(linear(Nhidden, Noutput))
        
        # Add a non-linearity
        if Nonlinearities[1] == 'relu':
            self.layer.append(relu())
        else:
            self.layer.append(sigmoid())
        
    def forward(self, X): 
        
        # Forward propagation
        for layer in self.layer:
            X = layer.forward(X)
        return X
    
    def backward(self, dY):
        # Backward propagation
        for layer in reversed(self.layer):
            dY = layer.backward(dY)
        return dY

    def update(self, learning_rate):
        # Updating the parameters of the network according to algorithm 
        
        # Gradient-descent step: W <- W - learning_rate * dW
        # Only the two linear layers (indices 0 and 2) have parameters to update
        self.layer[0].W = self.layer[0].W - learning_rate * self.layer[0].dW
        self.layer[0].b = self.layer[0].b - learning_rate * self.layer[0].db
        self.layer[2].W = self.layer[2].W - learning_rate * self.layer[2].dW
        self.layer[2].b = self.layer[2].b - learning_rate * self.layer[2].db
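
The completed code assumes that linear, relu, and sigmoid layer classes are defined elsewhere in the assignment, each exposing forward and backward methods, with the linear layer storing its parameter gradients in dW and db so that update() can read them. If those classes are not provided, here is a minimal NumPy sketch of what they could look like; the class names and the dW/db attribute names are assumptions inferred from how the completed code uses them.

import numpy as np

class linear:
    # Fully connected layer: Y = X W + b
    def __init__(self, Nin, Nout):
        self.W = 0.01 * np.random.randn(Nin, Nout)   # small random weights
        self.b = np.zeros(Nout)                      # zero biases

    def forward(self, X):
        self.X = X                                   # cache the input for the backward pass
        return X @ self.W + self.b

    def backward(self, dY):
        self.dW = self.X.T @ dY                      # gradient w.r.t. W, read by update()
        self.db = dY.sum(axis=0)                     # gradient w.r.t. b
        return dY @ self.W.T                         # gradient w.r.t. the input, passed to the previous layer

class relu:
    def forward(self, X):
        self.mask = X > 0                            # remember which units were active
        return X * self.mask

    def backward(self, dY):
        return dY * self.mask

class sigmoid:
    def forward(self, X):
        self.Y = 1.0 / (1.0 + np.exp(-X))            # cache the output for the backward pass
        return self.Y

    def backward(self, dY):
        return dY * self.Y * (1.0 - self.Y)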

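As a quick way to exercise the class (again assuming the sketch layers above, and feeding in the gradient of a squared-error loss, dY = Y - T), a small training loop on a toy XOR-style dataset could look like this:

import numpy as np

X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)   # toy inputs
T = np.array([[0], [1], [1], [0]], dtype=float)               # XOR targets

net = neural_network(Nhidden=4, Ninput=2, Noutput=1,
                     Nonlinearities=['relu', 'sigmoid'])

for epoch in range(5000):
    Y = net.forward(X)        # forward pass through all layers
    dY = Y - T                # gradient of 0.5 * ||Y - T||^2 with respect to Y
    net.backward(dY)          # backpropagate through the layers in reverse order
    net.update(0.1)           # gradient-descent parameter update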