#include <cassert>
#include <cmath>
#include <iostream>

#include "IpIpoptApplication.hpp"
#include "IpTNLP.hpp"

using namespace Ipopt;

class HS071_NLP : public TNLP
{
public:
  double thevalue;

  /** default constructor */
  HS071_NLP();

  /** default destructor */
  virtual ~HS071_NLP();

  /** returns the objective value stored by finalize_solution() */
  double getvalue() { return thevalue; }

  /**@name Overloaded from TNLP */
  //@{
  /** Method to return some info about the nlp */
  virtual bool get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
                            Index& nnz_h_lag, IndexStyleEnum& index_style);

  /** Method to return the bounds for my problem */
  virtual bool get_bounds_info(Index n, Number* x_l, Number* x_u,
                               Index m, Number* g_l, Number* g_u);

  /** Method to return the starting point for the algorithm */
  virtual bool get_starting_point(Index n, bool init_x, Number* x,
                                  bool init_z, Number* z_L, Number* z_U,
                                  Index m, bool init_lambda, Number* lambda);

  /** Method to return the objective value */
  virtual bool eval_f(Index n, const Number* x, bool new_x, Number& obj_value);

  /** Method to return the gradient of the objective */
  virtual bool eval_grad_f(Index n, const Number* x, bool new_x, Number* grad_f);

  /** Method to return the constraint residuals */
  virtual bool eval_g(Index n, const Number* x, bool new_x, Index m, Number* g);

  /** Method to return:
   *  1) The structure of the jacobian (if "values" is NULL)
   *  2) The values of the jacobian (if "values" is not NULL)
   */
  virtual bool eval_jac_g(Index n, const Number* x, bool new_x,
                          Index m, Index nele_jac, Index* iRow, Index* jCol,
                          Number* values);

  /** Method to return:
   *  1) The structure of the hessian of the lagrangian (if "values" is NULL)
   *  2) The values of the hessian of the lagrangian (if "values" is not NULL)
   */
  virtual bool eval_h(Index n, const Number* x, bool new_x, Number obj_factor,
                      Index m, const Number* lambda, bool new_lambda,
                      Index nele_hess, Index* iRow, Index* jCol, Number* values);
  //@}

  /** @name Solution Methods */
  //@{
  /** This method is called when the algorithm is complete so the TNLP can
   *  store/write the solution */
  virtual void finalize_solution(SolverReturn status, Index n, const Number* x,
                                 const Number* z_L, const Number* z_U,
                                 Index m, const Number* g, const Number* lambda,
                                 Number obj_value,
                                 const IpoptData* ip_data,
                                 IpoptCalculatedQuantities* ip_cq);
  //@}

private:
  // block the default compiler-generated copy constructor and assignment
  HS071_NLP(const HS071_NLP&);
  HS071_NLP& operator=(const HS071_NLP&);
};

// constructor
HS071_NLP::HS071_NLP()
{}

// destructor
HS071_NLP::~HS071_NLP()
{}

// returns the size of the problem
bool HS071_NLP::get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
                             Index& nnz_h_lag, IndexStyleEnum& index_style)
{
  // The problem described in HS071_NLP.hpp has 2 variables, x[0] through x[1]
  n = 2;

  // m, nnz_jac_g and nnz_h_lag must also be set here to match the problem;
  // their values are not shown in this listing (see the sketch below)

  // use the C style indexing (0-based)
  index_style = TNLP::C_STYLE;

  return true;
}

// returns the variable bounds
bool HS071_NLP::get_bounds_info(Index n, Number* x_l, Number* x_u,
                                Index m, Number* g_l, Number* g_u)
{
  // the variables have lower bounds of 1
  for (Index i = 0; i < n; i++) {
    x_l[i] = 1.0;
  }

  // the upper bounds, the constraint bounds and the remaining TNLP method
  // bodies (get_starting_point through finalize_solution) are missing from
  // this listing; see the sketch below for one possible completion

  return true;
}

// driver: create the NLP and the IpoptApplication, set options, solve,
// and read back the final objective value
int main(int argc, char* argv[])
{
  // Create instances of the nlp and the IpoptApplication as SmartPtrs,
  // so both objects are reference counted and freed automatically
  SmartPtr<HS071_NLP> mynlp = new HS071_NLP();
  SmartPtr<IpoptApplication> app = IpoptApplicationFactory();

  // Change some options
  app->Options()->SetStringValue("mu_strategy", "adaptive");
  app->Options()->SetStringValue("hessian_approximation", "limited-memory");
  app->Options()->SetStringValue("output_file", "ipopt.out");

  // Initialize the IpoptApplication and process the options
  ApplicationReturnStatus status;
  status = app->Initialize();
  if (status != Solve_Succeeded) {
    std::cout << std::endl << std::endl
              << "*** Error during initialization!" << std::endl;
    return (int) status;
  }

  // Ask Ipopt to solve the problem
  status = app->OptimizeTNLP(mynlp);

  if (status == Solve_Succeeded) {
    std::cout << std::endl << std::endl
              << "*** The problem solved!" << std::endl;
  }
  else {
    std::cout << std::endl << std::endl
              << "*** The problem FAILED!" << std::endl;
  }

  // fetch the final objective value stored by finalize_solution()
  double thevalue = mynlp->getvalue();
  std::cout << "Final objective value: " << thevalue << std::endl;

  // As the SmartPtrs go out of scope, the reference count will be
  // decremented and the objects will automatically be deleted.
  return (int) status;
}
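
The method bodies between get_bounds_info and the driver did not survive in the listing above (the upper bounds, get_starting_point, eval_f, eval_grad_f, eval_g, eval_jac_g, eval_h and finalize_solution), and the driver's opening lines are reconstructed in the standard Ipopt main() pattern. The sketch below is one hypothetical completion, assuming an unconstrained two-variable problem with objective (x[0] - 2)^2 + (x[1] - 3)^2; the objective, the starting point and the choice of m = 0 are illustrative assumptions, not the original problem. The one piece the driver genuinely relies on is finalize_solution() storing obj_value in thevalue, so that getvalue() returns the final objective after the solve.

// --- hypothetical completion of the listing above ---------------------------
// Assumes an unconstrained two-variable problem:
//     min (x[0] - 2)^2 + (x[1] - 3)^2   s.t.  x >= 1
// With m == 0, get_nlp_info above would also set m = 0, nnz_jac_g = 0 and
// nnz_h_lag = 0, and get_bounds_info would set upper bounds, e.g.
// x_u[i] = 2e19 (treated by Ipopt as +infinity).

// starting point: only x is initialized here
bool HS071_NLP::get_starting_point(Index n, bool init_x, Number* x,
                                   bool init_z, Number* z_L, Number* z_U,
                                   Index m, bool init_lambda, Number* lambda)
{
  assert(init_x == true);
  assert(init_z == false);
  assert(init_lambda == false);
  x[0] = 1.5;
  x[1] = 1.5;
  return true;
}

// objective: (x[0] - 2)^2 + (x[1] - 3)^2
bool HS071_NLP::eval_f(Index n, const Number* x, bool new_x, Number& obj_value)
{
  obj_value = (x[0] - 2.0) * (x[0] - 2.0) + (x[1] - 3.0) * (x[1] - 3.0);
  return true;
}

// gradient of the objective
bool HS071_NLP::eval_grad_f(Index n, const Number* x, bool new_x, Number* grad_f)
{
  grad_f[0] = 2.0 * (x[0] - 2.0);
  grad_f[1] = 2.0 * (x[1] - 3.0);
  return true;
}

// no constraints (m == 0), so there is nothing to evaluate
bool HS071_NLP::eval_g(Index n, const Number* x, bool new_x, Index m, Number* g)
{
  return true;
}

// empty Jacobian structure and values when m == 0
bool HS071_NLP::eval_jac_g(Index n, const Number* x, bool new_x,
                           Index m, Index nele_jac, Index* iRow, Index* jCol,
                           Number* values)
{
  return true;
}

// not needed with "hessian_approximation" set to "limited-memory";
// returning false tells Ipopt that no exact Hessian is available
bool HS071_NLP::eval_h(Index n, const Number* x, bool new_x, Number obj_factor,
                       Index m, const Number* lambda, bool new_lambda,
                       Index nele_hess, Index* iRow, Index* jCol, Number* values)
{
  return false;
}

// store the final objective so the driver can read it back via getvalue()
void HS071_NLP::finalize_solution(SolverReturn status, Index n, const Number* x,
                                  const Number* z_L, const Number* z_U,
                                  Index m, const Number* g, const Number* lambda,
                                  Number obj_value,
                                  const IpoptData* ip_data,
                                  IpoptCalculatedQuantities* ip_cq)
{
  thevalue = obj_value;
}

With a completion along these lines, the whole file should build against an installed Ipopt, for example with g++ hs071.cpp $(pkg-config --cflags --libs ipopt) on a pkg-config-aware installation.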