Skip to content

Commit 10465c0

Browse files
committed
Make the quadratic Jacobian merge entries for the same variable
1 parent 5116ca6 commit 10465c0

File tree

2 files changed

+92
-17
lines changed

2 files changed

+92
-17
lines changed

lib/nleval.cpp

Lines changed: 72 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -71,27 +71,69 @@ void LinearQuadraticEvaluator::analyze_jacobian_structure(size_t &m_jacobian_nnz
7171
auto &f = quadratic_constraints[i];
7272
auto row = quadratic_constraint_indices[i];
7373
auto N = f.size();
74+
75+
Hashmap<IndexT, size_t> variable_to_jacobian_nnz;
76+
7477
for (size_t j = 0; j < N; j++)
7578
{
7679
auto x1 = f.variable_1s[j];
7780
auto x2 = f.variable_2s[j];
7881
if (x1 == x2)
7982
{
80-
m_jacobian_rows.push_back(row);
81-
m_jacobian_cols.push_back(x1);
82-
jacobian_linear_terms.emplace_back(2.0 * f.coefficients[j], x1, m_jacobian_nnz);
83-
m_jacobian_nnz += 1;
83+
auto result = variable_to_jacobian_nnz.insert({x1, m_jacobian_nnz});
84+
auto iter = result.first;
85+
auto has_inserted = result.second;
86+
87+
if (has_inserted)
88+
{
89+
m_jacobian_rows.push_back(row);
90+
m_jacobian_cols.push_back(x1);
91+
jacobian_linear_terms.emplace_back(2.0 * f.coefficients[j], x1, m_jacobian_nnz);
92+
m_jacobian_nnz += 1;
93+
}
94+
else
95+
{
96+
auto nnz = iter->second;
97+
jacobian_linear_terms.emplace_back(2.0 * f.coefficients[j], x1, nnz);
98+
}
8499
}
85100
else
86101
{
87-
m_jacobian_rows.push_back(row);
88-
m_jacobian_cols.push_back(x1);
89-
jacobian_linear_terms.emplace_back(f.coefficients[j], x2, m_jacobian_nnz);
90-
m_jacobian_nnz += 1;
91-
m_jacobian_rows.push_back(row);
92-
m_jacobian_cols.push_back(x2);
93-
jacobian_linear_terms.emplace_back(f.coefficients[j], x1, m_jacobian_nnz);
94-
m_jacobian_nnz += 1;
102+
{
103+
auto result = variable_to_jacobian_nnz.insert({x1, m_jacobian_nnz});
104+
auto iter = result.first;
105+
auto has_inserted = result.second;
106+
107+
if (has_inserted)
108+
{
109+
m_jacobian_rows.push_back(row);
110+
m_jacobian_cols.push_back(x1);
111+
jacobian_linear_terms.emplace_back(f.coefficients[j], x2, m_jacobian_nnz);
112+
m_jacobian_nnz += 1;
113+
}
114+
else
115+
{
116+
auto nnz = iter->second;
117+
jacobian_linear_terms.emplace_back(f.coefficients[j], x2, nnz);
118+
}
119+
}
120+
{
121+
auto result = variable_to_jacobian_nnz.insert({x2, m_jacobian_nnz});
122+
auto iter = result.first;
123+
auto has_inserted = result.second;
124+
if (has_inserted)
125+
{
126+
m_jacobian_rows.push_back(row);
127+
m_jacobian_cols.push_back(x2);
128+
jacobian_linear_terms.emplace_back(f.coefficients[j], x1, m_jacobian_nnz);
129+
m_jacobian_nnz += 1;
130+
}
131+
else
132+
{
133+
auto nnz = iter->second;
134+
jacobian_linear_terms.emplace_back(f.coefficients[j], x1, nnz);
135+
}
136+
}
95137
}
96138
}
97139
if (f.affine_part)
@@ -100,11 +142,25 @@ void LinearQuadraticEvaluator::analyze_jacobian_structure(size_t &m_jacobian_nnz
100142
auto N = af.size();
101143
for (size_t j = 0; j < N; j++)
102144
{
103-
m_jacobian_rows.push_back(row);
104-
m_jacobian_cols.push_back(af.variables[j]);
105-
jacobian_constants.emplace_back(af.coefficients[j], m_jacobian_nnz + j);
145+
auto x = af.variables[j];
146+
147+
auto result = variable_to_jacobian_nnz.insert({x, m_jacobian_nnz});
148+
auto iter = result.first;
149+
auto has_inserted = result.second;
150+
151+
if (has_inserted)
152+
{
153+
m_jacobian_rows.push_back(row);
154+
m_jacobian_cols.push_back(x);
155+
jacobian_constants.emplace_back(af.coefficients[j], m_jacobian_nnz);
156+
m_jacobian_nnz += 1;
157+
}
158+
else
159+
{
160+
auto nnz = iter->second;
161+
jacobian_constants.emplace_back(af.coefficients[j], nnz);
162+
}
106163
}
107-
m_jacobian_nnz += N;
108164
}
109165
}
110166
}
@@ -170,7 +226,6 @@ void LinearQuadraticEvaluator::analyze_sparse_gradient_structure(
170226
{
171227
size_t grad_index =
172228
add_gradient_column(x1, gradient_nnz, gradient_cols, gradient_index_map);
173-
;
174229
gradient_linear_terms.emplace_back(c, x2, grad_index);
175230
grad_index =
176231
add_gradient_column(x2, gradient_nnz, gradient_cols, gradient_index_map);

tests/test_nlp_bilinear.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import pyoptinterface as poi
2+
3+
import pytest
4+
5+
6+
def test_bilinear(ipopt_model_ctor):
    """Bilinear NLP regression test: the quadratic Jacobian must merge
    duplicate entries for the same variable (x[0] appears in both factors
    of every product below), otherwise the solver sees an inconsistent
    sparsity pattern.
    """
    model = ipopt_model_ctor()

    # Three non-negative decision variables.
    x = model.add_m_variables(3, lb=0.0)

    # Objective: maximize (x0 + x1)(x0 + x2), expressed as a minimization.
    objective = -(x[0] + x[1]) * (x[0] + x[2])

    # Constraint: (x0 + x1)^2 + (x0 + x2)^2 == 4.
    quad_expr = (x[0] + x[1]) * (x[0] + x[1]) + (x[0] + x[2]) * (x[0] + x[2])
    model.add_quadratic_constraint(quad_expr, poi.Eq, 4.0)

    model.set_objective(objective)
    model.optimize()

    # Optimum: both factors equal sqrt(2), so the objective value is -2.
    result = model.get_model_attribute(poi.ModelAttribute.ObjectiveValue)
    assert result == pytest.approx(-2.0, abs=1e-8)

0 commit comments

Comments
 (0)