11import NLPModels: obj, grad, grad!, objgrad!, objgrad, hess
22
export LineModel
export obj, grad, derivative, grad!, objgrad!, objgrad, derivative!, hess, hess!, redirect!
55
"""A type to represent the restriction of a function to a direction.
Given f : Rⁿ → R, x ∈ Rⁿ and a nonzero direction d ∈ Rⁿ,
represents the function ϕ : R → R defined by

    ϕ(t) := f(x + td).
"""
mutable struct LineModel{T, S, M <: AbstractNLPModel{T, S}} <: AbstractNLPModel{T, S}
  meta::NLPModelMeta{T, S}  # metadata of the one-variable model ϕ
  counters::Counters        # evaluation counters for ϕ itself
  nlp::M                    # wrapped model f; concrete type parameter M avoids an abstract field
  x::S                      # anchor point of the line
  d::S                      # direction of the line
  xt::S                     # preallocated buffer holding x + t * d
end
2223
"""
    LineModel(nlp, x, d; xt = similar(x))

Return a `LineModel` representing ϕ(t) := f(x + td) for the model `nlp`,
anchored at `x` with direction `d`.

The keyword `xt` supplies a preallocated buffer used to store `x + t * d`
so that evaluations do not allocate.
"""
function LineModel(nlp::AbstractNLPModel{T, S}, x::S, d::S; xt::S = similar(x)) where {T, S}
  meta = NLPModelMeta{T, S}(1, x0 = zeros(T, 1), name = "LineModel to $(nlp.meta.name)")
  return LineModel(meta, Counters(), nlp, x, d, xt)
end
2728
2829""" `redirect!(ϕ, x, d)`
"""`obj(f, t)` evaluates the objective of the `LineModel`

    ϕ(t) := f(x + td).
"""
function obj(f::LineModel, t::AbstractFloat)
  NLPModels.increment!(f, :neval_obj)
  # Build x + t * d in the preallocated buffer to avoid allocating a vector.
  @. f.xt = f.x + t * f.d
  return obj(f.nlp, f.xt)
end
4547
"""`grad(f, t)` evaluates the first derivative of the `LineModel`

    ϕ(t) := f(x + td),

i.e.,

    ϕ'(t) = ∇f(x + td)ᵀd.
"""
function grad(f::LineModel, t::AbstractFloat)
  NLPModels.increment!(f, :neval_grad)
  @. f.xt = f.x + t * f.d  # in-place x + t * d
  return dot(grad(f.nlp, f.xt), f.d)
end
derivative(f::LineModel, t::AbstractFloat) = grad(f, t)
5962
"""`grad!(f, t, g)` evaluates the first derivative of the `LineModel`

    ϕ(t) := f(x + td),

i.e.,

    ϕ'(t) = ∇f(x + td)ᵀd.

The gradient ∇f(x + td) is stored in `g`.
"""
function grad!(f::LineModel, t::AbstractFloat, g::AbstractVector)
  NLPModels.increment!(f, :neval_grad)
  @. f.xt = f.x + t * f.d  # in-place x + t * d
  return dot(grad!(f.nlp, f.xt, g), f.d)
end
derivative!(f::LineModel, t::AbstractFloat, g::AbstractVector) = grad!(f, t, g)
7579
"""`objgrad!(f, t, g)` evaluates the objective and first derivative of the `LineModel`

    ϕ(t) := f(x + td),

i.e., returns (ϕ(t), ϕ'(t)) with

    ϕ'(t) = ∇f(x + td)ᵀd.

The gradient ∇f(x + td) is stored in `g`.
"""
function objgrad!(f::LineModel, t::AbstractFloat, g::AbstractVector)
  NLPModels.increment!(f, :neval_obj)
  NLPModels.increment!(f, :neval_grad)
  @. f.xt = f.x + t * f.d  # in-place x + t * d
  fx, _ = objgrad!(f.nlp, f.xt, g)
  # g now holds ∇f(x + td); the directional derivative is its dot product with d.
  return fx, dot(g, f.d)
end
9297
9398""" `objgrad(f, t)` evaluates the objective and first derivative of the `LineModel`
"""`hess(f, t)` evaluates the second derivative of the `LineModel`

    ϕ(t) := f(x + td),

i.e.,

    ϕ''(t) = dᵀ∇²f(x + td)d.
"""
function hess(f::LineModel, t::AbstractFloat)
  NLPModels.increment!(f, :neval_hess)
  @. f.xt = f.x + t * f.d  # in-place x + t * d
  return dot(f.d, hprod(f.nlp, f.xt, f.d))
end
123+
"""`hess!(f, t, Hv)` evaluates the second derivative of the `LineModel`

    ϕ(t) := f(x + td),

i.e.,

    ϕ''(t) = dᵀ∇²f(x + td)d.

The Hessian-vector product ∇²f(x + td)d is stored in `Hv`, so no vector
is allocated (in-place counterpart of `hess`).
"""
function hess!(f::LineModel, t::AbstractFloat, Hv::AbstractVector)
  NLPModels.increment!(f, :neval_hess)
  @. f.xt = f.x + t * f.d  # in-place x + t * d
  return dot(f.d, hprod!(f.nlp, f.xt, f.d, Hv))
end
0 commit comments