given an m by n matrix a, an n by n nonsingular diagonal matrix d, an m-vector b, and a positive number delta, the problem is to determine the convex combination x of the gauss-newton and scaled gradient directions that minimizes (a*x - b) in the least squares sense, subject to the restriction that the euclidean norm of d*x be at most delta.
this subroutine completes the solution of the problem if it is provided with the necessary information from the qr factorization of a. that is, if a = q*r, where q has orthogonal columns and r is an upper triangular matrix, then dogleg expects the full upper triangle of r and the first n components of (q transpose)*b.
the subroutine statement is
subroutine dogleg(n,r,lr,diag,qtb,delta,x,wa1,wa2)
where
n is a positive integer input variable set to the order of r.
r is an input array of length lr which must contain the upper triangular matrix r stored by rows.
lr is a positive integer input variable not less than (n*(n+1))/2.
diag is an input array of length n which must contain the diagonal elements of the matrix d.
qtb is an input array of length n which must contain the first n elements of the vector (q transpose)*b.
delta is a positive input variable which specifies an upper bound on the euclidean norm of d*x.
x is an output array of length n which contains the desired convex combination of the gauss-newton direction and the scaled gradient direction.
wa1 and wa2 are work arrays of length n.
Type | Intent | Optional | Attributes | Name
---|---|---|---|---
integer | | | | n
real(kind=wp) | | | | r(lr)
integer | | | | lr
real(kind=wp) | | | | diag(n)
real(kind=wp) | | | | qtb(n)
real(kind=wp) | | | | delta
real(kind=wp) | | | | x(n)
real(kind=wp) | | | | wa1(n)
real(kind=wp) | | | | wa2(n)
!>
!  given an m by n matrix a, an n by n nonsingular diagonal matrix d,
!  an m-vector b, and a positive number delta, determine the convex
!  combination x of the gauss-newton and scaled gradient directions
!  that minimizes (a*x - b) in the least squares sense, subject to the
!  restriction that the euclidean norm of d*x be at most delta.
!
!  the routine expects the information from the qr factorization of a:
!  if a = q*r with q having orthogonal columns and r upper triangular,
!  then r (full upper triangle, stored by rows in a packed array) and
!  the first n components of (q transpose)*b must be supplied.

subroutine dogleg(n,r,lr,diag,qtb,delta,x,wa1,wa2)

   implicit none

   integer n , lr
      !! n: positive input, the order of r.
      !! lr: positive input, length of r, not less than (n*(n+1))/2.
   real(wp) delta
      !! positive input: upper bound on the euclidean norm of d*x.
   real(wp) r(lr) , diag(n) , qtb(n) , x(n) , wa1(n) , wa2(n)
      !! r: input, upper triangular matrix stored by rows.
      !! diag: input, diagonal elements of the matrix d.
      !! qtb: input, first n elements of (q transpose)*b.
      !! x: output, the desired convex combination.
      !! wa1, wa2: work arrays of length n.

   integer i , j , jj , jp1 , k , l
   real(wp) alpha , bnorm , epsmch , gnorm , qnorm , sgnorm , sum , temp

   epsmch = dpmpar(1) ! the machine precision
   !
   ! first, calculate the gauss-newton direction.
   !
   ! back substitution on r*x = qtb. r is packed by rows, so jj walks
   ! backwards to the start of each row and l scans its off-diagonal part.
   jj = (n*(n+1))/2 + 1
   do k = 1 , n
      j = n - k + 1
      jp1 = j + 1
      jj = jj - k
      l = jj + 1
      sum = zero
      if ( n>=jp1 ) then
         do i = jp1 , n
            sum = sum + r(l)*x(i)
            l = l + 1
         enddo
      endif
      temp = r(jj)
      if ( temp==zero ) then
         ! exactly singular diagonal entry: replace it with a tiny
         ! value scaled by the largest magnitude in column j of r
         ! (l steps down the row-packed column) so the divide is safe.
         l = j
         do i = 1 , j
            temp = max(temp,abs(r(l)))
            l = l + n - i
         enddo
         temp = epsmch*temp
         if ( temp==zero ) temp = epsmch
      endif
      x(j) = (qtb(j)-sum)/temp
   enddo
   !
   ! test whether the gauss-newton direction is acceptable.
   !
   do j = 1 , n
      wa1(j) = zero
      wa2(j) = diag(j)*x(j)
   enddo
   qnorm = enorm(n,wa2)
   if ( qnorm>delta ) then
      !
      ! the gauss-newton direction is not acceptable.
      ! next, calculate the scaled gradient direction.
      !
      ! wa1 accumulates (r transpose)*qtb, then is scaled by 1/diag.
      l = 1
      do j = 1 , n
         temp = qtb(j)
         do i = j , n
            wa1(i) = wa1(i) + r(l)*temp
            l = l + 1
         enddo
         wa1(j) = wa1(j)/diag(j)
      enddo
      !
      ! calculate the norm of the scaled gradient and test for
      ! the special case in which the scaled gradient is zero.
      !
      gnorm = enorm(n,wa1)
      sgnorm = zero
      alpha = delta/qnorm
      if ( gnorm/=zero ) then
         !
         ! calculate the point along the scaled gradient
         ! at which the quadratic is minimized.
         !
         do j = 1 , n
            wa1(j) = (wa1(j)/gnorm)/diag(j)
         enddo
         ! wa2 = r * (normalized scaled gradient), row-packed product.
         l = 1
         do j = 1 , n
            sum = zero
            do i = j , n
               sum = sum + r(l)*wa1(i)
               l = l + 1
            enddo
            wa2(j) = sum
         enddo
         temp = enorm(n,wa2)
         sgnorm = (gnorm/temp)/temp
         !
         ! test whether the scaled gradient direction is acceptable.
         !
         alpha = zero
         if ( sgnorm<delta ) then
            !
            ! the scaled gradient direction is not acceptable.
            ! finally, calculate the point along the dogleg
            ! at which the quadratic is minimized.
            !
            bnorm = enorm(n,qtb)
            temp = (bnorm/gnorm)*(bnorm/qnorm)*(sgnorm/delta)
            temp = temp - (delta/qnorm)*(sgnorm/delta)**2 + &
                   sqrt((temp-(delta/qnorm))**2+&
                   (one-(delta/qnorm)**2)*(one-(sgnorm/delta)**2))
            alpha = ((delta/qnorm)*(one-(sgnorm/delta)**2))/temp
         endif
      endif
      !
      ! form appropriate convex combination of the gauss-newton
      ! direction and the scaled gradient direction.
      !
      temp = (one-alpha)*min(sgnorm,delta)
      do j = 1 , n
         x(j) = temp*wa1(j) + alpha*x(j)
      enddo
   endif

end subroutine dogleg