# Gradient ascent demo: maximize f(x) = 2x - x**2 (analytic maximum at x = 1),
# visualizing each iterate on the curve with matplotlib.
import matplotlib.pyplot as mp
import numpy as np


def origin(x):
    """Objective (primitive) function: f(x) = 2x - x**2."""
    return 2 * x - x ** 2


def derivative(x):
    """Analytic derivative of the objective: f'(x) = 2 - 2x."""
    return 2 - 2 * x


# Plot the objective curve over [0, 2] for visual reference.
x = np.linspace(0, 2, 9999)
mp.plot(x, origin(x), c='black')

# Gradient ascent parameters.
extreme_point = 0   # initial guess
alpha = 0.1         # step size, i.e. the learning rate
precision = 0.001   # convergence tolerance on the step magnitude

while True:
    # Visualize the current iterate on the curve.
    mp.scatter(extreme_point, origin(extreme_point))
    step = alpha * derivative(extreme_point)  # climbing pace
    extreme_point += step                     # climb toward the maximum
    if abs(step) < precision:
        break  # step is small enough -> converged

mp.show()