!*******************************************************
!* Program to demonstrate the use of multi-dimensional *
!* Steepest Descent Optimization subroutine            *
!* --------------------------------------------------- *
!* Reference: BASIC Scientific Subroutines, Vol. II    *
!* By F.R. Ruckdeschel, BYTE/McGraw-Hill, 1981 [1].    *
!*                                                     *
!* F90 version by J-P Moreau, Paris.                   *
!*               (www.jpmoreau.fr)                     *
!* --------------------------------------------------- *
!* Example: Find a local maximum of                    *
!*          F(x,y,z) = sin(x)+2*cos(y)-sin(z)          *
!*                                                     *
!* SAMPLE RUN:                                         *
!*                                                     *
!* How many dimensions: 3                              *
!*                                                     *
!* Convergence criterion: .000000001                   *
!*                                                     *
!* Maximum number of iterations: 50                    *
!*                                                     *
!* Starting constant: 1                                *
!*                                                     *
!* Input the starting point:                           *
!*                                                     *
!* X(1) = 1                                            *
!* X(2) = 1                                            *
!* X(3) = 1                                            *
!*                                                     *
!* The results are:                                    *
!*                                                     *
!* X(1) =  1.5707963                                   *
!* X(2) = -0.0000435                                   *
!* X(3) = -1.5707963                                   *
!*                                                     *
!* Local maximum = 4.0000000                           *
!*                                                     *
!* The number of steps was: 30                         *
!*                                                     *
!*******************************************************
PROGRAM DEMO_STEEPDS

real*8  D(3), Y(3)
real*8  X(10), X1(10), Eval
real*8  e, xk
integer i, l, m, n

  write(*,"(/' How many dimensions: ')",advance='no')
  read *, l
  write(*,"(' Convergence criterion: ')",advance='no')
  read *, e
  write(*,"(' Maximum number of iterations: ')",advance='no')
  read *, m
  write(*,"(' Starting constant: ')",advance='no')
  read *, xk
  print *,' '
  print *,'Input the starting point:'
  print *,' '
  do i=1, l
    write(*,40,advance='no') i
    read *, X(i)
  end do

  !Call steepest descent optimization subroutine
  call Steepds(l,e,m,xk,X,X1,Y,D,n)

  print *,' '
  print *,'The results are:'
  print *,' '
  do i=1, l
    write(*,50) i, X(i)
  end do
  write(*,60) Eval(X,D)
  write(*,70) n

  stop

40 format(' X(',i2,') = ')
50 format(' X(',i2,') = ',F10.7)
60 format(/' Local maximum = ',F10.7)
70 format(' The number of steps was: ',i2//)

end

!*******************************************************
! Function subroutine with derivatives (L=3)
real*8 Function Eval(X,D)
real*8 D(3), X(10)
  ! Partial derivatives of F(x,y,z) = sin(x)+2*cos(y)-sin(z)
  D(1) =  dcos(X(1))
  D(2) = -2.d0*dsin(X(2))
  D(3) = -dcos(X(3))
  ! Function value
  Eval = dsin(X(1)) + 2.d0*dcos(X(2)) - dsin(X(3))
end

!*******************************************************
! Utility routine called by Steepds()
Subroutine Utilit(l,xk,D,X,X1,Y)
real*8 xk, D(3), X(10), X1(10), Y(3), Eval
integer i, l
real*8 dd
  ! Find the magnitude of the gradient
  dd=0.d0
  do i=1, l
    dd=dd+D(i)*D(i)
  end do
  dd=dsqrt(dd)
  ! Update the X(i), saving the old values in X1(i)
  do i=1, l
    X1(i)=X(i)
    X(i)=X(i)+xk*D(i)/dd
  end do
  Y(3)=Eval(X,D)
  return
end

!***************************************************
!* Steepest descent optimization subroutine        *
!* ----------------------------------------------- *
!* This routine finds the local maximum or minimum *
!* of an L-dimensional function using the method   *
!* of steepest descent, or the gradient.           *
!* The function and the L derivatives must be      *
!* available in Function Eval(X,D).                *
!* ----------------------------------------------- *
!* INPUTS:                                         *
!*   l - The dimension of the function to study    *
!*   e - The convergence criterion                 *
!*   m - The maximum number of iterations          *
!*   xk - A starting constant                      *
!*   X(i) - Initial values of variables            *
!* OUTPUTS:                                        *
!*   X(i) - The locally optimum set                *
!*   Eval - The value of local maximum             *
!*   n - The number of iterations performed        *
!***************************************************
Subroutine Steepds(l,e,m,xk,X,X1,Y,D,n)
! Labels: 50, 51, 100, 200
real*8, parameter :: MACHEPS = 1.d-15
real*8 e, xk, X(10), X1(10), D(3), Y(3), Eval
integer i, j, m, n, l
  n=0
  ! Start initial probe
  do j=1, 3
    ! Obtain Y(j) and the gradient D(i)
    Y(j)=Eval(X,D)
    ! Update X(i)
    call Utilit(l,xk,D,X,X1,Y)
  end do
  ! We now have a history to base the subsequent search on

  ! Accelerate search if approach is monotonic
50 if (dabs(Y(2)-Y(1)) < MACHEPS) goto 51
  if ((Y(3)-Y(2))*(Y(2)-Y(1)) > 0.d0) xk = xk*1.2d0
  ! Decelerate if heading the wrong way
51 if (Y(3) < Y(2)) xk = xk/2.d0
  if (Y(3) > Y(2)) goto 100
  ! Restore the X(i)
  do i=1, l
    X(i)=X1(i)
  end do
  goto 200
  ! Shift the Y() history down
100 Y(1)=Y(2); Y(2)=Y(3)
  ! Obtain new values
200 Y(3)=Eval(X,D)
  ! Update X(i)
  call Utilit(l,xk,D,X,X1,Y)
  ! Check for maximum iterations and convergence
  n=n+1
  if (n >= m) return
  if (dabs(Y(3)-Y(2)) < e) return
  goto 50
end
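
!*******************************************************
! Illustrative sketch (an assumption, not part of the original
! program): Steepds() always steps along +gradient, so it climbs
! toward a local MAXIMUM. To locate a MINIMUM of some G instead,
! supply -G and the gradient of -G in Eval. The hypothetical
! function below does this for G(x,y) = (x-1)^2 + (y+2)^2, whose
! minimum is at (1,-2); to try it, rename it to Eval (replacing
! the Eval above) and answer 2 to 'How many dimensions'.
real*8 Function Eval_minimize_sketch(X,D)
real*8 D(3), X(10)
  ! Gradient of -G: d(-G)/dx = -2*(x-1), d(-G)/dy = -2*(y+2)
  D(1) = -2.d0*(X(1)-1.d0)
  D(2) = -2.d0*(X(2)+2.d0)
  ! Value of -G; maximizing -G minimizes G
  Eval_minimize_sketch = -( (X(1)-1.d0)**2 + (X(2)+2.d0)**2 )
end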