golden.py
import math as mt

def golden(f, a, b, tol=1.0e-10):
    # Golden section method for determining x
    # that minimizes the scalar function f(x).
    # The minimum must be bracketed in (a, b).
    c1 = (mt.sqrt(5.0) - 1.0)/2.0
    c2 = 1.0 - c1
    # Number of iterations needed to shrink the bracket below tol
    nIt = int(mt.ceil(mt.log(tol/abs(a - b))/mt.log(c1)))
    # First step
    x1 = c1*a + c2*b
    x2 = c2*a + c1*b
    f1 = f(x1)
    f2 = f(x2)
    # Iteration
    for i in range(nIt):
        if f1 > f2:
            a = x1
            x1 = x2
            f1 = f2
            x2 = c2*a + c1*b
            f2 = f(x2)
        else:
            b = x2
            x2 = x1
            f2 = f1
            x1 = c1*a + c2*b
            f1 = f(x1)
    if f1 < f2:
        return x1, f1
    else:
        return x2, f2
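A quick way to check the routine is to minimize a one-dimensional function whose minimum is known in closed form. The snippet below is a minimal sketch: the quadratic test function and the bracket (0, 5) are illustrative choices, not part of golden.py.

from golden import golden

# Quadratic with its minimum at x = 2 (f = 1), bracketed in (0, 5)
xMin, fMin = golden(lambda x: (x - 2.0)**2 + 1.0, 0.0, 5.0)
print(xMin, fMin)   # expected: approximately 2.0 and 1.0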
grad.py
import numpy as np
import math as mt
from golden import golden

def grad(F, gradf, x, d=0.5, tol=1.0e-10):
    # Gradient method for determining the vector x
    # that minimizes the function F(x);
    # gradf(x) returns the gradient of F;
    # x is the starting point.

    # Line function along the search direction h
    def f(al):
        return F(x + al*h)

    gr0 = -gradf(x)
    h = gr0.copy()
    F0 = F(x)
    itMax = 500
    for i in range(itMax):
        # 1D minimization along h over the step interval (0, d)
        al, fMin = golden(f, 0, d)
        x = x + al*h
        F1 = F(x)
        gr1 = -gradf(x)
        # Converged if the gradient vanishes or F no longer decreases
        if (mt.sqrt(np.dot(gr1, gr1)) <= tol) or (abs(F0 - F1) < tol):
            return x, i+1
        h = gr1
        gr0 = gr1.copy()
        F0 = F1
    print("Gradient method did not converge (500 iterations)")
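Before running the 2D example below, grad can be exercised on a convex quadratic bowl whose minimizer is known exactly. This is a minimal sketch under that assumption; the test function, its gradient, and the starting point are illustrative only.

import numpy as np
from grad import grad

# Convex bowl with its exact minimum at (1, -2); the gradient is analytic
def Fbowl(x):
    return (x[0] - 1.0)**2 + (x[1] + 2.0)**2

def gradFbowl(x):
    return np.array([2.0*(x[0] - 1.0), 2.0*(x[1] + 2.0)])

xMin, nIt = grad(Fbowl, gradFbowl, np.array([0.0, 0.0]))
print(xMin, nIt)   # expected: close to [1.0, -2.0] after a few iterations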
ex330.py
import numpy as np
import matplotlib.pyplot as plt
from grad import grad

def F(x):
    return 10.0*(x[1] - x[0]**2)**2 + (1 - x[0])**2

def gradf(x):
    gr = np.zeros((2), 'float')
    gr[0] = -40.0*(x[1] - x[0]**2)*x[0] - 2.0*(1 - x[0])
    gr[1] = 20.0*(x[1] - x[0]**2)
    return gr

# Contour plot of F on a grid
x = np.linspace(-3.0, 3.0, 101)
y = np.linspace(-2.0, 6.0, 101)
X, Y = np.meshgrid(x, y)
z = F([X, Y])
v = np.linspace(0.0, 20.0, 5)
plt.contourf(x, y, z, v, cmap=plt.cm.gray)
plt.colorbar()

# Gradient method from the starting point x0
x0 = np.array([0.0, 0.1])
xMin, nIt = grad(F, gradf, x0)
print("xMin=", xMin)
print("Number of iterations=", nIt)
plt.scatter(xMin[0], xMin[1], c="red")
plt.savefig('ex330.png', dpi=72)
plt.show()
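The objective in ex330.py is a scaled Rosenbrock-type function whose exact minimum lies at (1, 1) with F = 0, so the reported xMin should approach that point. The analytic gradient can also be spot-checked against central finite differences; the sketch below re-declares F and gradf so it runs standalone, and the test point and tolerance are arbitrary choices.

import numpy as np

# Same objective and analytic gradient as in ex330.py
def F(x):
    return 10.0*(x[1] - x[0]**2)**2 + (1 - x[0])**2

def gradf(x):
    return np.array([-40.0*(x[1] - x[0]**2)*x[0] - 2.0*(1 - x[0]),
                     20.0*(x[1] - x[0]**2)])

# Central finite-difference approximation of the gradient at a test point
x = np.array([0.7, 1.3])
eps = 1.0e-6
num = np.array([(F(x + eps*e) - F(x - eps*e))/(2.0*eps) for e in np.eye(2)])
print(np.allclose(num, gradf(x), atol=1.0e-5))   # expected: True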