76 lines
1.3 KiB
Python
Executable File
76 lines
1.3 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
|
|
|
|
"""
|
|
@authors: Yann
|
|
"""
|
|
|
|
|
|
import numpy as np
|
|
|
|
|
|
"""
|
|
A simple program that implements the Barzilai & Borwein formula on gradient
|
|
descent method.
|
|
"""
|
|
|
|
|
|
###############################################################################
|
|
|
|
|
|
# Let's just return a cost function (here J(x) = x^2 + x + y^2)
def cost(x):
    """Evaluate the quadratic cost J(x, y) = x^2 + x + y^2.

    x is a 2-element sequence (x, y); returns a scalar.
    """
    a, b = x[0], x[1]
    return a * a + a + b ** 2
|
|
|
|
|
|
# Computes the gradient of the cost function !
def gradient(x):
    """Return grad J at (x[0], x[1]) as a NumPy array: [2x + 1, 2y]."""
    df_dx = 2 * x[0] + 1  # derivative of x^2 + x
    df_dy = 2 * x[1]      # derivative of y^2
    return np.asarray([df_dx, df_dy])
|
|
|
|
|
|
def main():
    """Minimize J(x, y) = x^2 + x + y^2 by gradient descent with the
    Barzilai-Borwein (BB2) step-size update.

    Each iteration takes one descent step, then performs a look-ahead
    step to estimate the step size from the BB2 formula
    alpha = (dx . dg) / (dg . dg), where dx is the look-ahead step and
    dg the resulting change in gradient.

    Prints the final step size and iterate, and returns the final
    iterate as a NumPy array (the true minimum is at (-0.5, 0)).
    """
    # ############################## Parameters ##############################

    nb_iterations = 1000            # hard cap on descent steps
    alpha = 0.1                     # initial step size (before BB takes over)
    tolerance = 1e-12               # gradient-norm threshold for convergence
    x = np.asarray([58.0, -47.0])   # starting point

    # ################################# Begin #################################

    for _ in range(nb_iterations):

        grad = gradient(x)
        x = x - alpha * grad

        grad = gradient(x)
        # Tolerance-based test instead of the original exact equality
        # against [0, 0]: float gradients rarely hit zero exactly.
        if np.linalg.norm(grad) < tolerance:
            break

        # Look-ahead step, used only to probe the curvature for BB2.
        x_next = x - alpha * grad
        grad_next = gradient(x_next)

        delta_grad = grad_next - grad
        delta_x = -alpha * grad  # == x_next - x

        denom = np.dot(delta_grad, delta_grad)
        # Guard against a (near-)zero gradient change, which would make
        # the BB2 ratio nan/inf and poison every later iterate.
        if denom == 0:
            break
        alpha = np.dot(delta_x, delta_grad) / denom

    print(alpha)
    print(x)

    # ################################## End ##################################

    return x
|
|
|
|
|
|
# Run the demo only when this file is executed as a script.
if __name__ == '__main__':

    main()
|