#ifndef _TNLP2FPNLP_HPP_
#define _TNLP2FPNLP_HPP_
{ use_feasibility_pump_objective_ = use_feasibility_pump_objective; }
{ use_cutoff_constraint_ = use_cutoff_constraint; }
{ use_local_branching_constraint_ = use_local_branching_constraint; }
{ assert(rhs_local_branching_constraint >= 0);
  rhs_local_branching_constraint_ = rhs_local_branching_constraint; }
assert(lambda >= 0. && lambda <= 1.);
assert(norm > 0 && norm < 3);
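Taken together, lambda, the norm, and the objective scaling factor control the modified objective. The helpers below are a minimal sketch of that assumed shape, not verbatim Bonmin source; the names dist_to_point and fp_objective are illustrative only.

#include <cassert>
#include <cmath>
#include <cstddef>

// Hypothetical helpers illustrating the assumed feasibility pump objective:
// a convex combination of the distance to a point and the original objective.
double dist_to_point(std::size_t n, const double* x, const double* xbar, int norm)
{
  assert(norm > 0 && norm < 3);          // same restriction as setNorm above
  double d = 0.;
  for (std::size_t i = 0; i < n; i++)
    d += (norm == 1) ? std::fabs(x[i] - xbar[i])
                     : (x[i] - xbar[i]) * (x[i] - xbar[i]);
  return d;
}

double fp_objective(double scaling, double lambda, double sigma,
                    double dist, double orig_obj)
{
  assert(lambda >= 0. && lambda <= 1.);  // same restriction as setLambda above
  return scaling * lambda * dist + (1. - lambda) * sigma * orig_obj;
}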
Ipopt::Index m2 = m;
if(use_cutoff_constraint_) {
  m2--;
  if(lambda != NULL) lambda[m2] = 0;
}
if(use_local_branching_constraint_) {
  m2--;
  if(lambda != NULL) lambda[m2] = 0;
}
int ret_code = tnlp_->get_starting_point(n, init_x, x,
                                         init_z, z_L, z_U, m2, init_lambda, lambda);
return (ret_code != 0);
return tnlp_->get_variables_linearity(n, var_types);
Ipopt::Index m2 = m;
if(use_cutoff_constraint_)
  const_types[--m2] = Ipopt::TNLP::NON_LINEAR;  // cutoff row is nonlinear
if(use_local_branching_constraint_)
  const_types[--m2] = Ipopt::TNLP::LINEAR;      // local branching row is linear
return tnlp_->get_constraints_linearity(m2, const_types);
objectiveScalingFactor_ = value;
return objectiveScalingFactor_;
double objectiveScalingFactor_;          // scaling applied to the modified objective
bool use_feasibility_pump_objective_;    // use the distance-to-point objective?
bool use_cutoff_constraint_;             // add the cutoff constraint f(x) <= (1-epsilon) f(x*)?
bool use_local_branching_constraint_;    // add the local branching constraint?
double rhs_local_branching_constraint_;  // right-hand side of the local branching constraint
This is an adapter class to convert an NLP to a Feasibility Pump NLP by changing the objective function to the (2-norm) distance to a point.
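A minimal usage sketch, assuming a source problem tnlp and an Ipopt solver handle app already exist (names and include paths are placeholders): wrap the problem, set the point whose distance is minimized, and solve the modified NLP.

#include "BonTNLP2FPNLP.hpp"
#include "IpIpoptApplication.hpp"

void solve_fp_nlp(const Ipopt::SmartPtr<Ipopt::TNLP>& tnlp,
                  const Ipopt::SmartPtr<Ipopt::IpoptApplication>& app,
                  size_t n_vals, const Ipopt::Number* vals,
                  const Ipopt::Index* inds)
{
  // Build using tnlp as source problem (default scaling factor 100).
  Ipopt::SmartPtr<Bonmin::TNLP2FPNLP> fpnlp = new Bonmin::TNLP2FPNLP(tnlp);

  fpnlp->set_use_feasibility_pump_objective(true);
  fpnlp->set_dist_to_point_obj(n_vals, vals, inds);  // point to move toward
  fpnlp->setLambda(1.);                              // pure distance minimization

  app->OptimizeTNLP(Ipopt::GetRawPtr(fpnlp));        // solve the modified NLP
}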
virtual bool get_variables_linearity(Ipopt::Index n, LinearityType *var_types)
Overloaded to return the variable linearity (call passed on to tnlp_).
void set_cutoff(Ipopt::Number cutoff)
Set the cutoff value to use in the cutoff constraint.
virtual bool get_starting_point(Ipopt::Index n, bool init_x, Ipopt::Number *x, bool init_z, Ipopt::Number *z_L, Ipopt::Number *z_U, Ipopt::Index m, bool init_lambda, Ipopt::Number *lambda)
Passed on to tnlp_.
double getObjectiveScaling() const
Return the objective scaling factor.
void set_use_local_branching_constraint(bool use_local_branching_constraint)
Flag to indicate that we want to use a local branching constraint.
virtual bool get_constraints_linearity(Ipopt::Index m, LinearityType *const_types)
Overload this method to return the constraint linearity.
void setNorm(int norm)
Set the norm used in the distance objective (1 or 2).
void setLambda(double lambda)
Set the value for lambda.
void setObjectiveScaling(double value)
Set the objective scaling factor.
TNLP2FPNLP(const Ipopt::SmartPtr< Ipopt::TNLP > tnlp, double objectiveScalingFactor=100)
Build using tnlp as source problem.
virtual void finalize_solution(Ipopt::SolverReturn status, Ipopt::Index n, const Ipopt::Number *x, const Ipopt::Number *z_L, const Ipopt::Number *z_U, Ipopt::Index m, const Ipopt::Number *g, const Ipopt::Number *lambda, Ipopt::Number obj_value, const Ipopt::IpoptData *ip_data, Ipopt::IpoptCalculatedQuantities *ip_cq)
This method is called when the algorithm is complete so the TNLP can store/write the solution.
virtual bool eval_f(Ipopt::Index n, const Ipopt::Number *x, bool new_x, Ipopt::Number &obj_value)
Overloaded to return the value of the objective function.
void set_use_feasibility_pump_objective(bool use_feasibility_pump_objective)
Flag to indicate that we want to use the feasibility pump objective.
virtual bool eval_jac_g(Ipopt::Index n, const Ipopt::Number *x, bool new_x, Ipopt::Index m, Ipopt::Index nele_jac, Ipopt::Index *iRow, Ipopt::Index *jCol, Ipopt::Number *values)
Overload to return the Jacobian of g.
void set_use_cutoff_constraint(bool use_cutoff_constraint)
Flag to indicate that we want to use a cutoff constraint. This constraint has the form f(x) <= (1-epsilon) f(x*).
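A hedged sketch of enabling it (best_obj is a hypothetical incumbent objective value; epsilon is managed internally by the class):

// Illustrative only: adds one extra row f(x) <= (1 - epsilon) * best_obj.
fpnlp->set_use_cutoff_constraint(true);
fpnlp->set_cutoff(best_obj);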
virtual bool eval_h(Ipopt::Index n, const Ipopt::Number *x, bool new_x, Ipopt::Number obj_factor, Ipopt::Index m, const Ipopt::Number *lambda, bool new_lambda, Ipopt::Index nele_hess, Ipopt::Index *iRow, Ipopt::Index *jCol, Ipopt::Number *values)
Evaluate the modified Hessian of the Lagrangian.
void set_rhs_local_branching_constraint(double rhs_local_branching_constraint)
Set the rhs of the local branching constraint.
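A companion sketch (assumed semantics: the extra linear row keeps the next iterate within a 1-norm neighborhood of the current point, as in standard local branching; the value 10. is made up):

// Illustrative only: the rhs must be nonnegative (asserted in the setter).
fpnlp->set_use_local_branching_constraint(true);
fpnlp->set_rhs_local_branching_constraint(10.);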
void setSigma(double sigma)
Set the value for sigma.
void use(Ipopt::SmartPtr< TNLP > tnlp)
Use tnlp as the new source problem.
virtual bool eval_grad_f(Ipopt::Index n, const Ipopt::Number *x, bool new_x, Ipopt::Number *grad_f)
Overload this method to return the vector of the gradient of the objective w.r.t. x.
TNLP2FPNLP(const Ipopt::SmartPtr< TNLP > tnlp, const Ipopt::SmartPtr< TNLP2FPNLP > other)
Build using tnlp as source problem and using other for all other parameters.
virtual ~TNLP2FPNLP()
Default destructor.
virtual bool eval_g(Ipopt::Index n, const Ipopt::Number *x, bool new_x, Ipopt::Index m, Ipopt::Number *g)
Overload to return the values of the left-hand side of the constraints.
void set_dist_to_point_obj(size_t n, const Ipopt::Number *vals, const Ipopt::Index *inds)
Set the point to which distance is minimized.
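For example (a sketch with made-up indices and values), to measure the distance only on three components of x:

#include <vector>

// Hypothetical data: minimize the distance of x[0], x[3], x[7] to (0, 1, 1).
std::vector<Ipopt::Number> vals = {0., 1., 1.};  // target values
std::vector<Ipopt::Index>  inds = {0, 3, 7};     // variable indices
fpnlp->set_dist_to_point_obj(vals.size(), vals.data(), inds.data());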
virtual bool get_bounds_info(Ipopt::Index n, Ipopt::Number *x_l, Ipopt::Number *x_u, Ipopt::Index m, Ipopt::Number *g_l, Ipopt::Number *g_u)
This call is just passed on to tnlp_.
virtual bool get_nlp_info(Ipopt::Index &n, Ipopt::Index &m, Ipopt::Index &nnz_jac_g, Ipopt::Index &nnz_h_lag, Ipopt::TNLP::IndexStyleEnum &index_style)
Get info from tnlp_ and add Hessian information.
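The extra rows toggled above change the constraint count this method reports; a sketch of the assumed bookkeeping (not verbatim source), mirroring the m2 adjustments shown in the get_starting_point excerpt:

// Assumed: delegate to the source problem, then add one row per extra
// constraint; a full implementation must also extend nnz_jac_g and nnz_h_lag.
tnlp_->get_nlp_info(n, m, nnz_jac_g, nnz_h_lag, index_style);
if(use_cutoff_constraint_)          m++;  // cutoff row
if(use_local_branching_constraint_) m++;  // local branching row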