use original constraint sides for get_curr_violations(), fixes coin-or#603

- for bounds violations, original bounds were already used
svigerske committed Aug 30, 2022
1 parent 96e88f7 commit cb942d9
Showing 2 changed files with 27 additions and 4 deletions.
ChangeLog.md: 2 additions & 0 deletions
@@ -13,6 +13,8 @@ More detailed information about incremental changes can be found in the
   If set, the gradient of the objective will be requested by NLP only once. [#597]
 - Added `OrigIpoptNLP::orig_d_L()` and `OrigIpoptNLP::orig_d_U()` to get
   original constraint sides (before relaxation due to bound_relax_factor > 0).
+- `TNLP::get_curr_violations()` now returns the constraint violation and
+  complementarity w.r.t. the original (non-relaxed) constraint sides. [#603]
 
 ### 3.14.9 (2022-07-21)
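As context for the new ChangeLog entry: with bound_relax_factor > 0, Ipopt relaxes the constraint sides internally, so a violation measured against the relaxed sides can be zero while the original sides are still slightly violated. The following standalone sketch is not Ipopt code; the relaxation formula is only an assumption that mimics what bound_relax_factor does. It illustrates the difference that get_curr_violations() now reports.

// Standalone illustration (not Ipopt code): violation w.r.t. relaxed vs. original sides.
#include <algorithm>
#include <cmath>
#include <cstdio>

int main()
{
   const double d_L = 1.0;            // original lower side of d_L <= d(x)
   const double relax_factor = 1e-8;  // plays the role of bound_relax_factor (assumed formula)
   const double relaxed_d_L = d_L - relax_factor * std::max(1.0, std::fabs(d_L));

   const double dx = d_L - 0.5e-8;    // current activity d(x), slightly below the original side

   const double viol_relaxed  = std::max(0.0, relaxed_d_L - dx); // 0: feasible for the relaxed side
   const double viol_original = std::max(0.0, d_L - dx);         // 5e-9: reported since this commit

   std::printf("violation vs relaxed side:  %g\n", viol_relaxed);
   std::printf("violation vs original side: %g\n", viol_original);
   return 0;
}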
src/Interfaces/IpTNLP.cpp: 25 additions & 4 deletions
@@ -924,11 +924,22 @@ bool TNLP::get_curr_violations(
 
    // violation of d_L <= d(x) -> compute d_L - d first
    SmartPtr<Vector> d_viol_L;
-   if( orignlp->d_L()->Dim() > 0 )
+   SmartPtr<const Vector> d_L;
+   d_L = orignlp->orig_d_L();
+   if( IsValid(d_L) )
    {
-      SmartPtr<Vector> tmp = orignlp->d_L()->MakeNewCopy();
+      // orig_d_L is unscaled, but we need the scaled one below (because d is scaled)
+      if( orignlp->NLP_scaling()->have_d_scaling() )
+         d_L = orignlp->NLP_scaling()->apply_vector_scaling_d_NonConst(d_L);
+   }
+   else // if no relaxation, then orig_d_L() returns NULL, use d_L instead
+      d_L = orignlp->d_L();
+   if( d_L->Dim() > 0 )
+   {
+      SmartPtr<Vector> tmp = d_L->MakeNewCopy();
       d_viol_L = d->MakeNew();
       d_viol_L->Set(0.);
       orignlp->Pd_L()->TransMultVector(-1., *d, 1., *tmp);   // tmp := -P^Td + d_L, scaled
       orignlp->Pd_L()->MultVector(1., *tmp, 0., *d_viol_L);  // d_viol_L := P(d_L - P^Td), scaled
       if( !scaled && orignlp->NLP_scaling()->have_d_scaling() )
@@ -944,9 +955,19 @@
 
    // violation of d(x) <= d_U -> compute d - d_U first
    SmartPtr<Vector> d_viol_U;
-   if( orignlp->d_U()->Dim() > 0 )
+   SmartPtr<const Vector> d_U;
+   d_U = orignlp->orig_d_U();
+   if( IsValid(d_U) )
+   {
+      // orig_d_U is unscaled, but we need the scaled one below (because d is scaled)
+      if( orignlp->NLP_scaling()->have_d_scaling() )
+         d_U = orignlp->NLP_scaling()->apply_vector_scaling_d_NonConst(d_U);
+   }
+   else // if no relaxation, then orig_d_U() returns NULL, use d_U instead
+      d_U = orignlp->d_U();
+   if( d_U->Dim() > 0 )
    {
-      SmartPtr<Vector> tmp = orignlp->d_U()->MakeNewCopy();
+      SmartPtr<Vector> tmp = d_U->MakeNewCopy();
       d_viol_U = d->MakeNew();
       d_viol_U->Set(0.);
       orignlp->Pd_U()->TransMultVector(1., *d, -1., *tmp);   // tmp := P^Td - d_U, scaled
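The scaling handling added in both hunks follows the same idea: orig_d_L()/orig_d_U() return unscaled sides, while d is kept in scaled space, so the original side is scaled before the comparison, and the violation is unscaled again further down when the caller requested unscaled values. The scalar sketch below only illustrates that pattern; the variable names are made up and it does not use the Ipopt API.

// Scalar sketch (not Ipopt code): compare in scaled space, then undo the scaling if needed.
#include <algorithm>
#include <cstdio>

int main()
{
   const double scale_d = 0.5;            // hypothetical d-scaling factor
   const double d_L_orig_unscaled = 1.0;  // original (unscaled) lower side
   const double d_unscaled = 0.8;         // current activity in unscaled space
   const double d_scaled = scale_d * d_unscaled;

   // compare in scaled space, as the patched code does after scaling d_L
   const double d_L_scaled = scale_d * d_L_orig_unscaled;
   const double viol_scaled = std::max(0.0, d_L_scaled - d_scaled);

   // undo the scaling when unscaled violations are requested (scaled == false)
   const double viol_unscaled = viol_scaled / scale_d;

   std::printf("scaled violation:   %g\n", viol_scaled);   // 0.1
   std::printf("unscaled violation: %g\n", viol_unscaled); // 0.2
   return 0;
}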
