ROL_RandVarFunctional.hpp
// @HEADER
// ************************************************************************
//
// Rapid Optimization Library (ROL) Package
// Copyright (2014) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact lead developers:
// Drew Kouri (dpkouri@sandia.gov) and
// Denis Ridzal (dridzal@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef ROL_RANDVARFUNCTIONAL_HPP
#define ROL_RANDVARFUNCTIONAL_HPP

#include "ROL_Vector.hpp"
#include "ROL_Ptr.hpp"
#include "ROL_Objective.hpp"
#include "ROL_ScalarController.hpp"
#include "ROL_VectorController.hpp"
#include "ROL_SampleGenerator.hpp"

/** \class ROL::RandVarFunctional
    \brief Provides the interface to implement any functional that maps a
           random variable to a (extended) real number.

    A RandVarFunctional accumulates sample-weighted objective values,
    gradients, and Hessian-times-a-vector contributions through its
    update* member functions and assembles the final quantities across
    samples (and processes) through its get* member functions using a
    SampleGenerator.
*/
namespace ROL {

template<class Real>
class RandVarFunctional {
private:
  bool storage_;
  bool storage_hessvec_;
  Ptr<ScalarController<Real>> value_storage_;
  Ptr<VectorController<Real>> gradient_storage_;
  Ptr<ScalarController<Real>> gradvec_storage_;
  Ptr<VectorController<Real>> hessvec_storage_;

protected:
  Real val_;
  Real gv_;
  Ptr<Vector<Real>> g_;
  Ptr<Vector<Real>> hv_;
  Ptr<Vector<Real>> dualVector_;
  bool firstReset_;

  std::vector<Real> point_;
  Real weight_;

  // Evaluate objective function at current parameter
  Real computeValue(Objective<Real> &obj, const Vector<Real> &x,
                    Real &tol) {
    Real val(0);
    bool isComputed = false;
    if (storage_) {
      isComputed = value_storage_->get(val,point_);
    }
    if (!isComputed || !storage_) {
      obj.setParameter(point_);
      val = obj.value(x,tol);
      if (storage_) {
        value_storage_->set(val,point_);
      }
    }
    return val;
  }

  // Evaluate gradient of objective function at current parameter
  void computeGradient(Vector<Real> &g, Objective<Real> &obj,
                       const Vector<Real> &x, Real &tol) {
    bool isComputed = false;
    if (storage_) {
      isComputed = gradient_storage_->get(g,point_);
    }
    if (!isComputed || !storage_) {
      obj.setParameter(point_);
      obj.gradient(g,x,tol);
      if ( storage_ ) {
        gradient_storage_->set(g,point_);
      }
    }
  }

  // Evaluate Gradient-times-a-vector at current parameter
  Real computeGradVec(Vector<Real> &g, Objective<Real> &obj,
                      const Vector<Real> &v, const Vector<Real> &x,
                      Real &tol) {
    Real gv(0);
    computeGradient(g,obj,x,tol);
    bool isComputed = false;
    if (storage_hessvec_) {
      isComputed = gradvec_storage_->get(gv,point_);
    }
    if (!isComputed || !storage_hessvec_) {
      //gv = g.dot(v.dual());
      gv = g.apply(v);  // duality pairing of the gradient with v
      if (storage_hessvec_) {
        gradvec_storage_->set(gv,point_);
      }
    }
    return gv;
  }

  // Evaluate Hessian-times-a-vector at current parameter
  void computeHessVec(Vector<Real> &hv, Objective<Real> &obj,
                      const Vector<Real> &v, const Vector<Real> &x,
                      Real &tol) {
    bool isComputed = false;
    if (storage_hessvec_) {
      isComputed = hessvec_storage_->get(hv,point_);
    }
    if (!isComputed || !storage_hessvec_) {
      obj.setParameter(point_);
      obj.hessVec(hv,v,x,tol);
      if (storage_hessvec_) {
        hessvec_storage_->set(hv,point_);
      }
    }
  }

public:
  virtual ~RandVarFunctional() {}

  RandVarFunctional(void) : storage_(false), storage_hessvec_(false),
                            value_storage_(nullPtr),
                            gradient_storage_(nullPtr),
                            gradvec_storage_(nullPtr),
                            hessvec_storage_(nullPtr),
                            val_(0), gv_(0), firstReset_(true),
                            point_({}), weight_(0) {}

  void useStorage(bool storage) {
    storage_ = storage;
    if (storage) {
      if (value_storage_ == nullPtr) {
        value_storage_ = makePtr<ScalarController<Real>>();
      }
      if (gradient_storage_ == nullPtr) {
        gradient_storage_ = makePtr<VectorController<Real>>();
      }
    }
  }

  void useHessVecStorage(bool storage) {
    storage_hessvec_ = storage;
    if (storage) {
      useStorage(storage);
      if (gradvec_storage_ == nullPtr) {
        gradvec_storage_ = makePtr<ScalarController<Real>>();
      }
      if (hessvec_storage_ == nullPtr) {
        hessvec_storage_ = makePtr<VectorController<Real>>();
      }
    }
  }

  virtual void setStorage(const Ptr<ScalarController<Real>> &value_storage,
                          const Ptr<VectorController<Real>> &gradient_storage) {
    value_storage_    = value_storage;
    gradient_storage_ = gradient_storage;
    useStorage(true);
  }

  virtual void setHessVecStorage(const Ptr<ScalarController<Real>> &gradvec_storage,
                                 const Ptr<VectorController<Real>> &hessvec_storage) {
    gradvec_storage_ = gradvec_storage;
    hessvec_storage_ = hessvec_storage;
    useHessVecStorage(true);
  }

  /** \brief Reset internal storage.

      The boolean flag (or update type) determines whether the gradient
      and Hessian-times-a-vector caches are cleared in addition to the
      value cache.
  */
  virtual void resetStorage(bool flag = true) {
    if (storage_) {
      value_storage_->objectiveUpdate();
      if (flag) {
        gradient_storage_->objectiveUpdate();
        if (storage_hessvec_) {
          gradvec_storage_->objectiveUpdate();
          hessvec_storage_->objectiveUpdate();
        }
      }
    }
  }
  virtual void resetStorage(UpdateType type) {
    if (storage_) {
      value_storage_->objectiveUpdate(type);
      gradient_storage_->objectiveUpdate(type);
      if (storage_hessvec_) {
        gradvec_storage_->objectiveUpdate(type);
        hessvec_storage_->objectiveUpdate(type);
      }
    }
  }

  /** \brief Initialize temporary variables.

      Clones the dual work vectors on the first call and zeros all
      accumulators and Hessian-times-a-vector caches.
  */
  virtual void initialize(const Vector<Real> &x) {
    // Create memory for class members
    if ( firstReset_ ) {
      g_          = x.dual().clone();
      hv_         = x.dual().clone();
      dualVector_ = x.dual().clone();
      firstReset_ = false;
    }
    // Zero member variables
    const Real zero(0);
    val_ = zero; gv_ = zero;
    g_->zero(); hv_->zero(); dualVector_->zero();
    if (storage_hessvec_) {
      gradvec_storage_->reset();
      hessvec_storage_->reset();
    }
  }

  virtual void setSample(const std::vector<Real> &point, const Real weight) {
    point_.assign(point.begin(),point.end());
    weight_ = weight;
  }

  /** \brief Compute statistic.

      Returns the first entry of xstat if it is nonnull and nonempty;
      otherwise returns zero.
  */
  virtual Real computeStatistic(const Ptr<const std::vector<Real>> &xstat) const {
    Real stat(0);
    if (xstat != nullPtr && !xstat->empty()) {
      stat = (*xstat)[0];
    }
    return stat;
  }

  /** \brief Update internal storage for value computation.

      Adds the sample weight times the objective value at the current
      sample point to the running value sum.
  */
  virtual void updateValue(Objective<Real> &obj,
                           const Vector<Real> &x,
                           const std::vector<Real> &xstat,
                           Real &tol) {
    Real val = computeValue(obj,x,tol);
    val_ += weight_ * val;
  }

  /** \brief Update internal risk measure storage for gradient computation.

      Adds the weighted gradient of the objective at the current sample
      point to the running gradient sum.
  */
  virtual void updateGradient(Objective<Real> &obj,
                              const Vector<Real> &x,
                              const std::vector<Real> &xstat,
                              Real &tol) {
    computeGradient(*dualVector_,obj,x,tol);
    g_->axpy(weight_,*dualVector_);
  }

  /** \brief Update internal risk measure storage for Hessian-times-a-vector computation.

      Adds the weighted Hessian-times-a-vector of the objective at the
      current sample point to the running sum.
  */
  virtual void updateHessVec(Objective<Real> &obj,
                             const Vector<Real> &v,
                             const std::vector<Real> &vstat,
                             const Vector<Real> &x,
                             const std::vector<Real> &xstat,
                             Real &tol) {
    computeHessVec(*dualVector_,obj,v,x,tol);
    hv_->axpy(weight_,*dualVector_);
  }

  /** \brief Return risk measure value.

      Sums the locally accumulated, sample-weighted values across all
      samples and processes using the sampler.
  */
  virtual Real getValue(const Vector<Real> &x,
                        const std::vector<Real> &xstat,
                        SampleGenerator<Real> &sampler) {
    Real val(0);
    sampler.sumAll(&val_,&val,1);
    return val;
  }

  /** \brief Return risk measure (sub)gradient.

      Sums the locally accumulated, sample-weighted gradient across all
      samples and processes using the sampler.
  */
  virtual void getGradient(Vector<Real> &g,
                           std::vector<Real> &gstat,
                           const Vector<Real> &x,
                           const std::vector<Real> &xstat,
                           SampleGenerator<Real> &sampler) {
    sampler.sumAll(*g_,g);
  }

  /** \brief Return risk measure Hessian-times-a-vector.

      Sums the locally accumulated, sample-weighted Hessian-times-a-vector
      across all samples and processes using the sampler.
  */
  virtual void getHessVec(Vector<Real> &hv,
                          std::vector<Real> &hvstat,
                          const Vector<Real> &v,
                          const std::vector<Real> &vstat,
                          const Vector<Real> &x,
                          const std::vector<Real> &xstat,
                          SampleGenerator<Real> &sampler) {
    sampler.sumAll(*hv_,hv);
  }
};

} // namespace ROL

#endif
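
A minimal usage sketch, not part of the header above: with the default update*/get* implementations, RandVarFunctional computes the sample average of a stochastic objective. The helper name sampleAverage and the tolerance default are hypothetical, and the sampler accessors numMySamples, getMyPoint, and getMyWeight are assumed to be the usual ROL::SampleGenerator queries.

// Sketch only: the objective, optimization vector, and sampler are supplied
// by the caller; all types come from the ROL headers included here.
#include <vector>
#include "ROL_RandVarFunctional.hpp"

template<class Real>
Real sampleAverage(ROL::Objective<Real> &obj,
                   const ROL::Vector<Real> &x,
                   ROL::SampleGenerator<Real> &sampler,
                   Real tol = static_cast<Real>(1e-8)) {
  ROL::RandVarFunctional<Real> rvf;  // default update/get pair => expectation
  std::vector<Real> xstat;           // no statistic variable for a plain expectation
  rvf.useStorage(true);              // cache objective values per sample point
  rvf.initialize(x);                 // clone and zero internal work vectors
  rvf.resetStorage();                // clear any cached values and gradients
  // Accumulate weight_i * f(x, xi_i) over the samples owned by this process.
  for (int i = 0; i < sampler.numMySamples(); ++i) {
    rvf.setSample(sampler.getMyPoint(i), sampler.getMyWeight(i));
    rvf.updateValue(obj, x, xstat, tol);
  }
  // Reduce the partial sums across processes/batches via the sampler.
  return rvf.getValue(x, xstat, sampler);
}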
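For functionals other than the plain expectation, derived classes override the update*/get* hooks while reusing the protected compute* helpers, the accumulators val_, g_, hv_, and the sample weight weight_. The sketch below is illustrative only: the class name MeanPlusThresholdPenalty and the coefficients coeff/threshold are hypothetical, and only the value path is overridden; the gradient and Hessian-times-a-vector hooks keep their inherited expectation defaults and would need matching overrides in a real implementation.

// Sketch only: accumulates E[ f(x,xi) + coeff * max(f(x,xi) - threshold, 0) ].
#include <algorithm>
#include <vector>
#include "ROL_RandVarFunctional.hpp"

template<class Real>
class MeanPlusThresholdPenalty : public ROL::RandVarFunctional<Real> {
private:
  const Real coeff_, threshold_;
  // Names from the dependent base class must be imported explicitly.
  using ROL::RandVarFunctional<Real>::val_;
  using ROL::RandVarFunctional<Real>::weight_;
  using ROL::RandVarFunctional<Real>::computeValue;

public:
  MeanPlusThresholdPenalty(Real coeff, Real threshold)
    : coeff_(coeff), threshold_(threshold) {}

  void updateValue(ROL::Objective<Real> &obj, const ROL::Vector<Real> &x,
                   const std::vector<Real> &xstat, Real &tol) override {
    // Weighted contribution of the current sample point; the inherited
    // getValue sums these contributions across samples and processes.
    Real val = computeValue(obj, x, tol);
    Real pen = std::max(val - threshold_, static_cast<Real>(0));
    val_ += weight_ * (val + coeff_ * pen);
  }
};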