# Python Program to Find Derivative of Activation Functions Assignment Solution.

## Instructions

Objective
Write a Python program that computes and plots the derivatives of common activation functions.

## Requirements and Specifications

## Source Code

```def Question1():     print(" *** QUESTION 1 *** ")     # Part 1)     print("I love Machine Learning and AI!")     print("Ethel") # Name     print("") def Question2():     print(" *** QUESTION 2 *** ")     # Part 2)     name = input("Enter name: ")     print("Hi ", name, ", very nice to meet you!")     print("") def Question3():     print(" *** QUESTION 3 *** ")     # Part 3)     my_integer = 5     my_float = float(1.5)     my_string = "this is a string"     my_boolean = True     print("The integer variable is: ", my_integer)     print("The float variable is: ", my_float)     print("The string variable is: ", my_string)     print("The boolean variable is: ", my_boolean)     print("") def Question4():     print(" *** QUESTION 4 *** ")     # Part 4)     for i in range(11): # from 0 to 10         print(i, " - hello")     print("") def Question5():     print(" *** QUESTION 5 *** ")     # Part 5)     # Import the module NumPy     import numpy as np     # Create two vectors     vec1 = np.array([5,0,0])     vec2 = np.array([0,10,0])     # Calculate its cross product     vec3 = np.cross(vec1,vec2)     print(f"The cross product of {vec1} and {vec2} is {vec3}")     print("") def Question6():     print(" *** QUESTION 6 *** ")     # Part 6)     # Import matplotlib and numpy     import numpy as np     import matplotlib.pyplot as plt     # Now, create a vector of 1000 x values from -10 to 10     xspan = np.linspace(-10,10,1000)     # Sigmoid and its derivative     def sigmoid(x):         return 1.0/(1.0+np.exp(-xspan))     def sigmoid_diff(x):         return sigmoid(x)*(1-sigmoid(x))     # Tanh and derivative of tanh     def tanh(x):         return (np.exp(xspan) - np.exp(-xspan))/(np.exp(xspan) + np.exp(-xspan))     def tanh_diff(x):         return 1.0 - np.power(tanh(x), 2)     # ReLu and its derivative     def ReLu(x):         y = np.zeros(len(x))         i = 0         for xi in x:             if xi > 0:                 y[i] = xi             i += 1         return y     def 
ReLu_diff(x):         y = np.zeros(len(x))         i = 0         for xi in x:             if xi > 0:                 y[i] = 1             elif xi == 0:                 y[i] = np.infty             i += 1         return y     # Now plot     plt.plot(xspan, sigmoid_diff(xspan), label = 'diff sigmoid')     plt.plot(xspan, tanh_diff(xspan), label = 'diff tanh')     plt.plot(xspan, ReLu_diff(xspan), label = 'diff ReLu')     plt.xlabel('X')     plt.ylabel('Y')     plt.grid(True)     plt.legend()     plt.show()     print("") def Question7():     print(" *** QUESTION 7 *** ")     # Part 7)     # Import matplotlib and numpy     import numpy as np     import matplotlib.pyplot as plt     # Now, create a vector of 1000 x values from -10 to 10     xspan = np.linspace(-10, 10, 1000)     # SQLU and its derivative     def SQLU_diff(x):         y = np.zeros(len(x))         i = 0         for xi in x:             if xi > 0.0:                 y[i] = 1             elif xi >= -2.0 and xi <= 0:                 y[i] = 1 + xi/2.0             elif xi < -2.0:                 y[i] = 0             i += 1         return y     # Diff of Bent identity     def Bent_diff(x):         return x/(2*np.sqrt(np.power(x, 2) + 1)) + 1.0     def Gaussian_diff(x):         return -2.0*x*np.exp(-np.power(x,2))     # Now plot     plt.plot(xspan, SQLU_diff(xspan), label='diff SQLU')     plt.plot(xspan, Bent_diff(xspan), label='diff Bent')     plt.plot(xspan, Gaussian_diff(xspan), label='diff Gaussian')     plt.xlabel('X')     plt.ylabel('Y')     plt.grid(True)     plt.legend()     plt.show()     """         SQLU Function and its derivative             SQLU function is defined for all real values. For positive values, the range is always 1             while for a range between -2 and 0, the range is equal to 1 +x/2, which translates to             a line with a slope of 0.5 and a y-intercept at y = 1. 
For negative values, the range is 0             So, its range is: [0,1]         Bent function and its derivative             This function depends of the square root of the squared of x, which means that the term             inside the square root will never be negative or equal to zero. This means that there are no             values of x for wich its derivative is undefined.             For negative values, the function approaches to 0.5. For positive values, the function             approaches to 1.5, and for x = 0, the value of the function is 1.         Gaussian function and its derivative             The derivative of the Gaussian function contains one global maximum and one global minimum.             This function depends of an exponential term, but this term is equal to the negative of the squared             of x, which means that the exponential term will never be higher than 1. Since the term             depends of the square of x, the values of this exponential will always be positive and <= 1             So, for negative or positive values, this term is always smaller than 1, and for values of |x| near             infinite, the term is zero. This means that, the range of the function is between its global maximum             and minimum, which is: [-0.8578, 0.8578] at x1 = -0.7071 and x = 0.7071     """ if __name__ == '__main__':     Question1()     Question2()     Question3()     Question4()     Question5()     Question6()     Question7()```