#ifndef optimizer_h
#define optimizer_h

#include <iostream>
#include <iomanip>

#include <costfunction.h>
#include <criteria.h>

#include <ql/handle.hpp>

using QuantLib::DotProduct;
using std::setw;

template <class V> class OptimizationMethod;
template <class V> class OptimizationProblem;

//! Abstract base class for optimization methods working on vectors of type V
template <class V>
class OptimizationMethod {
  public:
    typedef double value_type;
  protected:
    //! initial guess
    V initialValue_;
    //! current iteration number
    int iterationNumber_;
    //! convergence criteria
    OptimizationEndCriteria endCriteria_;
    //! number of cost function and gradient evaluations performed so far
    int functionEvaluation_, gradientEvaluation_;
    //! current cost function value and squared norm of the gradient
    value_type functionValue_, squaredNorm_;
    //! current iterate and search direction
    V x_, searchDirection_;
  public:
    explicit OptimizationMethod()
    : iterationNumber_(0),
      functionEvaluation_(0), gradientEvaluation_(0),
      functionValue_(1), squaredNorm_(1) {}
    virtual ~OptimizationMethod() {}

    //! set the initial guess and reset the iteration state
    inline void setInitialValue(const V& initialValue) {
        iterationNumber_ = 0;
        initialValue_ = initialValue;
        x_ = initialValue;
        searchDirection_ = V(x_.size());
    }
    //! set the convergence criteria
    inline void setEndCriteria(const OptimizationEndCriteria& endCriteria) {
        endCriteria_ = endCriteria;
    }

    //! access to the current state of the method
    inline int& iterationNumber() { return iterationNumber_; }
    inline OptimizationEndCriteria& endCriteria() { return endCriteria_; }
    inline int& functionEvaluation() { return functionEvaluation_; }
    inline int& gradientEvaluation() { return gradientEvaluation_; }
    inline value_type& functionValue() { return functionValue_; }
    inline value_type& gradientNormValue() { return squaredNorm_; }
    V& x() { return x_; }
    V& searchDirection() { return searchDirection_; }

    //! minimize the given problem (implemented by concrete methods)
    virtual void Minimize(OptimizationProblem<V>& P) = 0;
};
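
/*  Illustrative sketch only (not part of the original interface): a naive
    fixed-step gradient-descent method showing how a concrete subclass is
    expected to drive an OptimizationProblem.  It assumes V behaves like
    QuantLib::Array (size(), arithmetic operators, DotProduct); the step size,
    iteration cap and stopping threshold are arbitrary placeholders, and a real
    method would rely on endCriteria_ and a proper line search instead. */
template <class V>
class ExampleGradientDescent : public OptimizationMethod<V> {
    double stepSize_;
    int maxIterations_;
  public:
    ExampleGradientDescent(double stepSize = 1e-3, int maxIterations = 1000)
    : stepSize_(stepSize), maxIterations_(maxIterations) {}

    virtual void Minimize(OptimizationProblem<V>& P) {
        V gradient(this->x_.size());
        while (this->iterationNumber_ < maxIterations_) {
            // evaluate cost and gradient at the current iterate
            double f = P.valueAndFirstDerivative(gradient, this->x_);
            double g2 = DotProduct(gradient, gradient);
            // step along the negative gradient
            this->searchDirection_ = gradient * (-1.0);
            this->x_ += this->searchDirection_ * stepSize_;
            ++this->iterationNumber_;
            // record and (optionally) display the progress
            P.Save(this->iterationNumber_, f, g2, stepSize_, *this);
            if (g2 < 1e-12)   // crude stopping test (placeholder)
                break;
        }
    }
};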

//! Default output policy: optionally logs the optimization progress to a stream
template <class V>
class OptimizationProblemOutput {
    std::ostream& os_;
  public:
    typedef double value_type;

    OptimizationProblemOutput() : os_(std::cout) {}
    OptimizationProblemOutput(std::ostream& os) : os_(os) {}
    virtual ~OptimizationProblemOutput() {}

    //! called once before the optimization starts
    virtual void init(OptimizationMethod<V>& method) {
#ifdef DISPLAY_OPTI
        os_ << "Initial value : " << method.x() << std::endl;
        os_ << std::endl
            << " fen : function evaluation number" << std::endl
            << " gen : gradient evaluation number" << std::endl
            << "| iter | function value | gradient norm | line-search step | fen | gen |" << std::endl
            << "|------|------------------|-----------------------|-----------------------|-------|-------|" << std::endl;
#endif
    }

    //! called at each iteration: records the current state and optionally logs it
    virtual void save(int iterationNumber,
                      value_type function,
                      value_type normGradient,
                      value_type lineSearchStep,
                      OptimizationMethod<V>& method) {
        method.functionValue() = function;
        method.gradientNormValue() = normGradient;
#ifdef DISPLAY_OPTI
        os_.setf(std::ios::scientific, std::ios::floatfield);
        os_ << "|" << setw(4) << iterationNumber
            << " |" << setw(15) << function
            << " |" << setw(19) << normGradient
            << " |" << setw(19) << lineSearchStep
            << " |" << setw(5) << method.functionEvaluation()
            << " |" << setw(5) << method.gradientEvaluation()
            << " |" << std::endl;
        os_.flush();
#endif
    }
};

//! Ties together a cost function, an optimization method and an output policy
template <class V>
class OptimizationProblem {
  public:
    typedef double value_type;
  protected:
    //! cost function to be minimized
    CostFunction<V>& costFunction_;
    //! optimization method used to minimize it
    OptimizationMethod<V>& method_;
    //! output policy (shared handle)
    QuantLib::Handle<OptimizationProblemOutput<V> > opo_;
  public:
    //! constructor using the default output policy
    OptimizationProblem(CostFunction<V>& f,
                        OptimizationMethod<V>& meth)
    : costFunction_(f), method_(meth),
      opo_(QuantLib::Handle<OptimizationProblemOutput<V> >(
               new OptimizationProblemOutput<V>())) {
        opo_->init(method_);
    }
    //! constructor using a user-supplied output policy
    OptimizationProblem(CostFunction<V>& f,
                        OptimizationMethod<V>& meth,
                        const QuantLib::Handle<OptimizationProblemOutput<V> >& opo)
    : costFunction_(f), method_(meth), opo_(opo) {
        opo_->init(method_);
    }

    ~OptimizationProblem() {}

    //! cost function value at x (updates the evaluation counter)
    value_type value(const V& x) {
        method_.functionEvaluation()++;
        return costFunction_.value(x);
    }

    //! cost function gradient at x (updates the evaluation counter)
    void firstDerivative(V& grad_f, const V& x) {
        method_.gradientEvaluation()++;
        costFunction_.firstDerivative(grad_f, x);
    }

    //! cost function value and gradient at x (updates both counters)
    value_type valueAndFirstDerivative(V& grad_f, const V& x) {
        method_.functionEvaluation()++;
        method_.gradientEvaluation()++;
        return costFunction_.valueAndFirstDerivative(grad_f, x);
    }

    //! access to the underlying optimization method
    OptimizationMethod<V>& optimisationMethod() { return method_; }

    //! launch the minimization
    void Minimize() { method_.Minimize(*this); }

    //! current minimizer (the method's current iterate)
    V& minimumValue() { return method_.x(); }

    //! forward the current iteration data to the output policy
    void Save(int iterationNumber,
              value_type function,
              value_type normGradient,
              value_type lineSearchStep,
              OptimizationMethod<V>& method) {
        opo_->save(iterationNumber, function, normGradient, lineSearchStep, method);
    }
};
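
/*  Usage sketch, for illustration only.  It assumes QuantLib::Array is used as
    the vector type V and that the user provides a QuadraticCost class deriving
    from CostFunction<Array>; neither is defined in this header.

    QuadraticCost cost;                                  // user-defined cost function
    ExampleGradientDescent<QuantLib::Array> method;      // or any concrete OptimizationMethod
    method.setInitialValue(QuantLib::Array(2, 1.0));     // starting point
    OptimizationProblem<QuantLib::Array> problem(cost, method);
    problem.Minimize();
    QuantLib::Array xMin = problem.minimumValue();       // minimizer found by the method
*/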

#endif