Compute the gradient of the function (the Jacobian matrix):
| Type | Intent | Optional | Attributes | Name |
|---|---|---|---|---|
| class(nlesolver_type) | intent(inout) | | | me |
| real(kind=wp) | intent(in) | | dimension(:) | x |
| real(kind=wp) | intent(out) | | dimension(:,:) | g |
```fortran
subroutine grad(me,x,g)
    !! compute the gradient of the function (Jacobian)

    implicit none

    class(nlesolver_type),intent(inout) :: me
    real(wp),dimension(:),intent(in)    :: x
    real(wp),dimension(:,:),intent(out) :: g

    f_evals = f_evals + 2   ! to approximate forward diff derivatives

    g(1,1) = 2.0_wp * x(1)  ! df(1)/dx
    g(2,1) = 0.0_wp         ! df(2)/dx
    g(1,2) = 1.0_wp         ! df(1)/dy
    g(2,2) = 1.0_wp         ! df(2)/dy

end subroutine grad
```
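
The assignments above use the convention that `g(i,j)` holds the partial derivative of the i-th equation with respect to the j-th variable. Written out for this two-equation example (with `x(1)` denoted x and `x(2)` denoted y):

$$
J(i,j) = \frac{\partial f_i}{\partial x_j}, \qquad
J = \begin{bmatrix}
\partial f_1/\partial x & \partial f_1/\partial y \\
\partial f_2/\partial x & \partial f_2/\partial y
\end{bmatrix}
= \begin{bmatrix}
2x & 1 \\
0 & 1
\end{bmatrix}
$$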
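
A minimal usage sketch follows, showing how a `grad` routine of this form is typically supplied to the solver. The module and kind names (`nlesolver_module`, `nlesolver_rk`), the `initialize` keyword arguments (`n`, `m`, `max_iter`, `tol`, `func`, `grad`), and the companion `func` residual routine are assumptions patterned on the library's examples, not a verbatim copy of its interface.

```fortran
program grad_usage_sketch
    !! Hedged sketch: registering an analytic Jacobian with the solver.
    !! Names marked "assumed" below should be checked against the library.

    use nlesolver_module, wp => nlesolver_rk   ! assumed module and kind names

    implicit none

    type(nlesolver_type) :: solver
    real(wp) :: x(2)
    integer :: istat
    character(len=:),allocatable :: message
    integer :: f_evals

    f_evals = 0

    ! assumed initializer: register the residual function and its Jacobian
    call solver%initialize(n=2, m=2, max_iter=100, tol=1.0e-8_wp, &
                           func=func, grad=grad)
    call solver%status(istat, message)
    if (istat /= 0) then
        write(*,*) 'initialization error: ', message
        stop
    end if

    x = [1.0_wp, 1.0_wp]        ! initial guess
    call solver%solve(x)        ! iterate toward f(x) = 0
    call solver%status(istat, message)
    write(*,*) message, ' x = ', x

contains

    subroutine func(me,x,f)
        !! residual vector (placeholder problem consistent with the Jacobian above)
        class(nlesolver_type),intent(inout) :: me
        real(wp),dimension(:),intent(in)    :: x
        real(wp),dimension(:),intent(out)   :: f
        f_evals = f_evals + 1
        f(1) = x(1)**2 + x(2)   ! df(1)/dx = 2*x(1), df(1)/dy = 1
        f(2) = x(2) + 1.0_wp    ! df(2)/dx = 0,      df(2)/dy = 1
    end subroutine func

    subroutine grad(me,x,g)
        !! analytic Jacobian, as documented on this page
        class(nlesolver_type),intent(inout) :: me
        real(wp),dimension(:),intent(in)    :: x
        real(wp),dimension(:,:),intent(out) :: g
        f_evals = f_evals + 2   ! to approximate forward diff derivatives
        g(1,1) = 2.0_wp * x(1)  ! df(1)/dx
        g(2,1) = 0.0_wp         ! df(2)/dx
        g(1,2) = 1.0_wp         ! df(1)/dy
        g(2,2) = 1.0_wp         ! df(2)/dy
    end subroutine grad

end program grad_usage_sketch
```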