@@ -1,7 +1,7 @@
 import NLPModels: obj, grad, grad!, objgrad!, objgrad, hess
 
 export LineModel
-export obj, grad, derivative, grad!, objgrad!, objgrad, derivative!, hess, redirect!
+export obj, grad, derivative, grad!, objgrad!, objgrad, derivative!, hess, hess!, redirect!
 
 """A type to represent the restriction of a function to a direction.
 Given f : Rⁿ → R, x ∈ Rⁿ and a nonzero direction d ∈ Rⁿ,
@@ -12,17 +12,23 @@ represents the function ϕ : R → R defined by
 
     ϕ(t) := f(x + td).
 """
-mutable struct LineModel <: AbstractNLPModel
+mutable struct LineModel{T} <: AbstractNLPModel
   meta::NLPModelMeta
   counters::Counters
   nlp::AbstractNLPModel
-  x::AbstractVector
-  d::AbstractVector
+  x::Vector{T}
+  d::Vector{T}
+  xt::Vector{T}
 end
 
-function LineModel(nlp::AbstractNLPModel, x::AbstractVector, d::AbstractVector)
-  meta = NLPModelMeta(1, x0 = zeros(eltype(x), 1), name = "LineModel to $(nlp.meta.name)")
-  return LineModel(meta, Counters(), nlp, x, d)
+function LineModel(
+  nlp::AbstractNLPModel,
+  x::AbstractVector{T},
+  d::AbstractVector{T};
+  xt::AbstractVector{T} = similar(x),
+) where {T}
+  meta = NLPModelMeta(1, x0 = zeros(T, 1), name = "LineModel to $(nlp.meta.name)")
+  return LineModel{T}(meta, Counters(), nlp, x, d, xt)
 end
 
 """`redirect!(ϕ, x, d)`
@@ -40,7 +46,8 @@
 """
 function obj(f::LineModel, t::AbstractFloat)
   NLPModels.increment!(f, :neval_obj)
-  return obj(f.nlp, f.x + t * f.d)
+  @. f.xt = f.x + t * f.d
+  return obj(f.nlp, f.xt)
 end
 
 """`grad(f, t)` evaluates the first derivative of the `LineModel`
@@ -53,7 +60,8 @@
 """
 function grad(f::LineModel, t::AbstractFloat)
   NLPModels.increment!(f, :neval_grad)
-  return dot(grad(f.nlp, f.x + t * f.d), f.d)
+  @. f.xt = f.x + t * f.d
+  return dot(grad(f.nlp, f.xt), f.d)
 end
 derivative(f::LineModel, t::AbstractFloat) = grad(f, t)
 
@@ -69,7 +77,8 @@ The gradient ∇f(x + td) is stored in `g`.
 """
 function grad!(f::LineModel, t::AbstractFloat, g::AbstractVector)
   NLPModels.increment!(f, :neval_grad)
-  return dot(grad!(f.nlp, f.x + t * f.d, g), f.d)
+  @. f.xt = f.x + t * f.d
+  return dot(grad!(f.nlp, f.xt, g), f.d)
 end
 derivative!(f::LineModel, t::AbstractFloat, g::AbstractVector) = grad!(f, t, g)
 
@@ -86,7 +95,8 @@ The gradient ∇f(x + td) is stored in `g`.
 function objgrad!(f::LineModel, t::AbstractFloat, g::AbstractVector)
   NLPModels.increment!(f, :neval_obj)
   NLPModels.increment!(f, :neval_grad)
-  fx, gx = objgrad!(f.nlp, f.x + t * f.d, g)
+  @. f.xt = f.x + t * f.d
+  fx, gx = objgrad!(f.nlp, f.xt, g)
   return fx, dot(gx, f.d)
 end
 
@@ -112,5 +122,20 @@ i.e.,
 """
 function hess(f::LineModel, t::AbstractFloat)
   NLPModels.increment!(f, :neval_hess)
-  return dot(f.d, hprod(f.nlp, f.x + t * f.d, f.d))
+  @. f.xt = f.x + t * f.d
+  return dot(f.d, hprod(f.nlp, f.xt, f.d))
+end
+
+"""Evaluate the second derivative of the `LineModel`
+
+    ϕ(t) := f(x + td),
+
+i.e.,
+
+    ϕ''(t) = dᵀ∇²f(x + td)d.
+"""
+function hess!(f::LineModel, t::AbstractFloat, Hv::AbstractVector)
+  NLPModels.increment!(f, :neval_hess)
+  @. f.xt = f.x + t * f.d
+  return dot(f.d, hprod!(f.nlp, f.xt, f.d, Hv))
 end
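
For readers landing on this commit without the rest of the file: `LineModel` restricts an `AbstractNLPModel` objective f to the ray t ↦ x + td, so by the chain rule ϕ'(t) = ∇f(x + td)ᵀd and ϕ''(t) = dᵀ∇²f(x + td)d, which `grad`/`grad!` and `hess`/`hess!` obtain from a single gradient or Hessian-vector product of the underlying model. The new `xt` field, filled in place with `@. f.xt = f.x + t * f.d`, preallocates the trial point so that repeated evaluations during a line search no longer allocate a fresh vector. Below is a minimal usage sketch, not part of the commit; it assumes the package defining this file (e.g. SolverTools.jl) and ADNLPModels.jl are loaded, and the objective, starting point, and step size are purely illustrative:

    using ADNLPModels, NLPModels

    # Rosenbrock test objective (illustrative only).
    nlp = ADNLPModel(x -> (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2, [-1.2; 1.0])

    x = copy(nlp.meta.x0)
    d = -grad(nlp, x)          # steepest-descent direction

    ϕ = LineModel(nlp, x, d)   # ϕ(t) = f(x + td); trial points go into the preallocated xt

    t = 1.0e-3
    ϕt   = obj(ϕ, t)           # ϕ(t)   = f(x + td)
    dϕt  = grad(ϕ, t)          # ϕ'(t)  = ∇f(x + td)ᵀd
    d2ϕt = hess(ϕ, t)          # ϕ''(t) = dᵀ∇²f(x + td)d

    # One Newton step on the scalar function ϕ, one way the second
    # derivative is useful inside a line search.
    t -= dϕt / d2ϕt

Passing a buffer through the keyword argument `xt` lets a solver reuse the same storage across successive `LineModel`s instead of allocating `similar(x)` each time.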