5.1.3. Modeling and Optimization in C++
In this section, we use the MindOpt C++ API to model and solve the problem described in the Linear Programming Example.
5.1.3.1. Row-wise Input: MdoLoEx1
First, include the header file:
#include "MindoptCpp.h"
and create the optimization model:
    /*------------------------------------------------------------------*/
    /* Step 1. Create a model and change the parameters. */
    /*------------------------------------------------------------------*/
    /* Create an empty model. */
    MdoModel model;
Next, we call mindopt::MdoModel::setIntAttr() to set the objective sense to minimization, and call mindopt::MdoModel::addVar() to add the four decision variables, specifying their lower bounds, upper bounds, objective coefficients, names, and types (for details on mindopt::MdoModel::setIntAttr() and mindopt::MdoModel::addVar(), please refer to the C++ API functions):
        /*------------------------------------------------------------------*/
        /* Step 2. Input model. */
        /*------------------------------------------------------------------*/
        /* Change to minimization problem. */
        model.setIntAttr(MDO_INT_ATTR::MIN_SENSE, MDO_YES);

        /* Add variables. */
        std::vector<MdoVar> x;
        x.push_back(model.addVar(0.0, 10.0, 1.0, "x0", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x1", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x2", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x3", MDO_NO));
Next, we add the linear constraints:
        /* Add constraints. */
        model.addCons(1.0, MDO_INFINITY, 1.0 * x[0] + 1.0 * x[1] + 2.0 * x[2] + 3.0 * x[3], "c0");
        model.addCons(1.0, 1.0, 1.0 * x[0] - 1.0 * x[2] + 6.0 * x[3], "c1");
Once the problem has been input, we call mindopt::MdoModel::solveProb() to solve the optimization problem and mindopt::MdoModel::displayResults() to view the optimization results:
        /*------------------------------------------------------------------*/
        /* Step 3. Solve the problem and populate the result. */
        /*------------------------------------------------------------------*/
        /* Solve the problem. */
        model.solveProb();
        model.displayResults();
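Optionally, the model that has just been built can also be written to a file for inspection. This is a small optional addition using writeProb (the same call appears in the MdoLoEx3 example further below); the output path chosen here is arbitrary:

        /* Optional: write the built model to a file for inspection (the path is arbitrary). */
        model.writeProb("./MdoLoEx1.lp");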
The complete source code is provided in the file MdoLoEx1.cpp:
/**
 * Description
 * -----------
 *
 * Linear optimization (row-wise input).
 *
 * Formulation
 * -----------
 *
 * Minimize
 *  obj: 1 x0 + 1 x1 + 1 x2 + 1 x3
 * Subject To
 *  c0 : 1 x0 + 1 x1 + 2 x2 + 3 x3 >= 1
 *  c1 : 1 x0 - 1 x2 + 6 x3 = 1
 * Bounds
 *  0 <= x0 <= 10
 *  0 <= x1
 *  0 <= x2
 *  0 <= x3
 * End
 */
#include <iostream>
#include <vector>
#include "MindoptCpp.h"

using namespace mindopt;

int main(void)
{
    /*------------------------------------------------------------------*/
    /* Step 1. Create a model and change the parameters. */
    /*------------------------------------------------------------------*/
    /* Create an empty model. */
    MdoModel model;

    try
    {
        /*------------------------------------------------------------------*/
        /* Step 2. Input model. */
        /*------------------------------------------------------------------*/
        /* Change to minimization problem. */
        model.setIntAttr(MDO_INT_ATTR::MIN_SENSE, MDO_YES);

        /* Add variables. */
        std::vector<MdoVar> x;
        x.push_back(model.addVar(0.0, 10.0, 1.0, "x0", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x1", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x2", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x3", MDO_NO));

        /* Add constraints. */
        model.addCons(1.0, MDO_INFINITY, 1.0 * x[0] + 1.0 * x[1] + 2.0 * x[2] + 3.0 * x[3], "c0");
        model.addCons(1.0, 1.0, 1.0 * x[0] - 1.0 * x[2] + 6.0 * x[3], "c1");

        /*------------------------------------------------------------------*/
        /* Step 3. Solve the problem and populate the result. */
        /*------------------------------------------------------------------*/
        /* Solve the problem. */
        model.solveProb();
        model.displayResults();
    }
    catch (MdoException & e)
    {
        std::cerr << "===================================" << std::endl;
        std::cerr << "Error : code <" << e.getResult() << ">" << std::endl;
        std::cerr << "Reason : " << model.explainResult(e.getResult()) << std::endl;
        std::cerr << "===================================" << std::endl;

        return static_cast<int>(e.getResult());
    }

    return static_cast<int>(MDO_OKAY);
}
5.1.3.2. Column-wise Input: MdoLoEx2
In the following code we model the same problem again, but this time the nonzeros of the constraint matrix are entered column by column. When calling mindopt::MdoModel::addCons(), only the lower bound (left-hand side, LHS) and upper bound (right-hand side, RHS) of each constraint are provided, and the constraint matrix itself remains empty (no nonzero elements). After the constraints have been added, we create column objects mindopt::MdoCol that store, for each column, the positions (indices) of its nonzeros across the constraints together with the corresponding nonzero values. Finally, mindopt::MdoModel::addVar() is called to create each variable, supplying its objective coefficient, lower and upper bounds, the column of constraint nonzeros, the variable name, and the variable type.
In addition, we call mindopt::MdoModel::getStatus() to check the optimization status of the solver, and use mindopt::MdoModel::getRealAttr() and mindopt::MdoVar::getRealAttr() to retrieve the objective value and the optimal solution, respectively (for details on mindopt::MdoModel::getRealAttr() and mindopt::MdoVar::getRealAttr(), please refer to the C++ API functions).
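Before the full listing, here is a deliberately reduced sketch of the column-wise pattern, using only the first constraint and the variable x0. It is condensed from the MdoLoEx2.cpp example below and omits the error handling of the full version:

#include <iostream>
#include "MindoptCpp.h"

using namespace mindopt;

int main(void)
{
    MdoModel model;
    model.setIntAttr(MDO_INT_ATTR::MIN_SENSE, MDO_YES);

    /* Add an empty constraint: only its bounds, no nonzero elements yet. */
    MdoCons c0 = model.addCons(1.0, MDO_INFINITY, "c0");

    /* Build the column of x0, i.e. its nonzero coefficient in each constraint. */
    MdoCol col0;
    col0.addTerm(c0, 1.0);

    /* Create the variable together with its column:
       (lower bound, upper bound, objective coefficient, column, name, type). */
    MdoVar x0 = model.addVar(0.0, 10.0, 1.0, col0, "x0", MDO_NO);

    model.solveProb();
    if (model.getStatus() == MDO_OPTIMAL)
    {
        std::cout << "Objective : " << model.getRealAttr(MDO_REAL_ATTR::PRIMAL_OBJ_VAL) << std::endl;
        std::cout << "x0        : " << x0.getRealAttr(MDO_REAL_ATTR::PRIMAL_SOLN) << std::endl;
    }
    return static_cast<int>(MDO_OKAY);
}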
The complete source code is provided in the file MdoLoEx2.cpp:
/**
 * Description
 * -----------
 *
 * Linear optimization (column-wise input).
 *
 * Formulation
 * -----------
 *
 * Minimize
 *  obj: 1 x0 + 1 x1 + 1 x2 + 1 x3
 * Subject To
 *  c0 : 1 x0 + 1 x1 + 2 x2 + 3 x3 >= 1
 *  c1 : 1 x0 - 1 x2 + 6 x3 = 1
 * Bounds
 *  0 <= x0 <= 10
 *  0 <= x1
 *  0 <= x2
 *  0 <= x3
 * End
 */
#include <iostream>
#include <vector>
#include "MindoptCpp.h"

using namespace mindopt;

int main(void)
{
    /*------------------------------------------------------------------*/
    /* Step 1. Create a model and change the parameters. */
    /*------------------------------------------------------------------*/
    /* Create an empty model. */
    MdoModel model;

    try
    {
        /*------------------------------------------------------------------*/
        /* Step 2. Input model. */
        /*------------------------------------------------------------------*/
        /* Change to minimization problem. */
        model.setIntAttr(MDO_INT_ATTR::MIN_SENSE, MDO_YES);

        /* Add empty constraints. */
        std::vector<MdoCons> cons;
        cons.push_back(model.addCons(1.0, MDO_INFINITY, "c0"));
        cons.push_back(model.addCons(1.0, 1.0, "c1"));

        /* Input columns. */
        std::vector<MdoCol> col(4);
        col[0].addTerm(cons[0], 1.0);
        col[0].addTerm(cons[1], 1.0);
        col[1].addTerm(cons[0], 1.0);
        col[2].addTerm(cons[0], 2.0);
        col[2].addTerm(cons[1], -1.0);
        col[3].addTerm(cons[0], 3.0);
        col[3].addTerm(cons[1], 6.0);

        /* Add variables. */
        std::vector<MdoVar> x;
        x.push_back(model.addVar(0.0, 10.0, 1.0, col[0], "x0", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, col[1], "x1", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, col[2], "x2", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, col[3], "x3", MDO_NO));

        /*------------------------------------------------------------------*/
        /* Step 3. Solve the problem and populate the result. */
        /*------------------------------------------------------------------*/
        /* Solve the problem. */
        model.solveProb();
        model.displayResults();

        switch (model.getStatus())
        {
        case MDO_UNKNOWN:
            std::cout << "Optimizer terminated with an UNKNOWN status." << std::endl;
            break;
        case MDO_OPTIMAL:
            std::cout << "Optimizer terminated with an OPTIMAL status." << std::endl;
            std::cout << " - Primal objective : " << model.getRealAttr(MDO_REAL_ATTR::PRIMAL_OBJ_VAL) << std::endl;
            for (int j = 0; j < 4; ++j)
            {
                std::cout << "x[" << j << "] = " << x[j].getRealAttr(MDO_REAL_ATTR::PRIMAL_SOLN) << std::endl;
            }
            break;
        case MDO_INFEASIBLE:
            std::cout << "Optimizer terminated with an INFEASIBLE status." << std::endl;
            break;
        case MDO_UNBOUNDED:
            std::cout << "Optimizer terminated with an UNBOUNDED status." << std::endl;
            break;
        case MDO_INF_OR_UBD:
            std::cout << "Optimizer terminated with an INFEASIBLE or UNBOUNDED status." << std::endl;
            break;
        }
    }
    catch (MdoException & e)
    {
        std::cerr << "===================================" << std::endl;
        std::cerr << "Error : code <" << e.getResult() << ">" << std::endl;
        std::cerr << "Reason : " << model.explainResult(e.getResult()) << std::endl;
        std::cerr << "===================================" << std::endl;

        return static_cast<int>(e.getResult());
    }

    return static_cast<int>(MDO_OKAY);
}
5.1.3.3. Advanced Usage Example: MdoLoEx3
The following code demonstrates further advanced APIs, with examples of inputting a model, modifying a model, retrieving the basic solution, and warm-starting; for the complete usage of the API, please refer to the complete API documentation.
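To make the warm-start part easier to follow, the sketch below condenses the essential pattern from MdoLoEx3.cpp (solve once, record the optimal basis, change an objective coefficient, load the basis back, and re-solve with the simplex method). It uses a smaller model and drops the error handling and the other steps of the full example:

#include <iostream>
#include <vector>
#include <functional>
#include "MindoptCpp.h"

using namespace mindopt;

int main(void)
{
    MdoModel model;
    model.setIntAttr(MDO_INT_ATTR::MIN_SENSE, MDO_YES);

    std::vector<std::reference_wrapper<MdoVar> > x;
    x.push_back(model.addVar(0.0, 10.0, 1.0, "x0", MDO_NO));
    x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x1", MDO_NO));

    std::vector<std::reference_wrapper<MdoCons> > conss;
    conss.push_back(model.addCons(1.0 * x[0] + 2.0 * x[1] >= 1.0, "c0"));

    /* First solve: obtain an optimal basis. */
    model.solveProb();

    /* Record the basis status of every variable (column) and constraint (row). */
    std::vector<int> col_basis;
    std::vector<int> row_basis;
    for (auto var : x)
    {
        col_basis.push_back(var.get().getIntAttr(MDO_INT_ATTR::COL_BASIS));
    }
    for (auto cons : conss)
    {
        row_basis.push_back(cons.get().getIntAttr(MDO_INT_ATTR::ROW_BASIS));
    }

    /* Change an objective coefficient, then load the recorded basis back. */
    x[1].get().setRealAttr("Obj", 3.0);
    model.setIntAttrArray(MDO_INT_ATTR::ROW_BASIS, 0, row_basis.size(), row_basis.data());
    model.setIntAttrArray(MDO_INT_ATTR::COL_BASIS, 0, col_basis.size(), col_basis.data());

    /* Select the simplex method (as in Step 7 of the full example below)
       so that the loaded basis is used as a warm start, then re-solve. */
    model.setIntParam(MDO_INT_PARAM::METHOD, 0);
    model.solveProb();
    model.displayResults();

    return static_cast<int>(MDO_OKAY);
}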
The complete source code is provided in the file MdoLoEx3.cpp:
/**
 * Description
 * -----------
 *
 * Linear optimization.
 *  - Row input.
 *  - Column input.
 *  - Query.
 */
#include <iostream>
#include <vector>
#include "MindoptCpp.h"

#define WRITE_LP
#define MY_FOLDER "./"

using namespace mindopt;

int main(void)
{
    /*------------------------------------------------------------------*/
    /* Step 1. Create a model and change the parameters. */
    /*------------------------------------------------------------------*/
    /* Create an empty model. */
    MdoModel model;

    try
    {
        /*------------------------------------------------------------------*/
        /* Step 2. Input model. */
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 2. Input model." << std::endl << std::endl;
        /* Change to minimization problem. */
        model.setIntAttr(MDO_INT_ATTR::MIN_SENSE, MDO_YES);

        /* Add variables. */
        std::vector<std::reference_wrapper<MdoVar> > x;
        x.push_back(model.addVar(0.0, 10.0, 1.0, "x0", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x1", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x2", MDO_NO));
        x.push_back(model.addVar(0.0, MDO_INFINITY, 1.0, "x3", MDO_NO));

        /* Add constraints. */
        std::vector<std::reference_wrapper<MdoCons> > conss;
        conss.push_back(model.addCons(1.0 * x[0] + 1.0 * x[1] + 2.0 * x[2] + 3.0 * x[3] >= 1.0, "c0"));
        conss.push_back(model.addCons(1.0 * x[0] - 1.0 * x[2] + 6.0 * x[3] == 1.0, "c1"));

        /*------------------------------------------------------------------*/
        /* Step 3. Solve the problem and populate the result. */
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 3. Solve the problem and populate the result." << std::endl << std::endl;
        /* Solve the problem. */
        model.solveProb();
        model.displayResults();
#ifdef WRITE_LP
        model.writeProb(MY_FOLDER "Step3.lp");
#endif

        /*------------------------------------------------------------------*/
        /* Step 4. Add another two variables and then resolve the problem. */
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 4. Add another two variables and then resolve the problem." << std::endl << std::endl;
        double lbs2[] = { 0, -2 };
        double ubs2[] = { MDO_INFINITY, MDO_INFINITY };
        double objs2[] = { 1, -1 };
        MdoCol cols[2];
        double vals[2][2] =
        {
            { 1.0, 2.0 },
            { 3.0, 4.0 }
        };
        cols[0].addTerms(conss, vals[0], 2);
        cols[1].addTerms(conss, vals[1], 2);
        std::cout << cols[0] << std::endl;
        std::vector<std::reference_wrapper<MdoVar> > y;
        y = model.addVars(2, lbs2, ubs2, objs2, cols, "y", NULL);

        /* Solve the problem. */
        model.solveProb();
        model.displayResults();
#ifdef WRITE_LP
        model.writeProb(MY_FOLDER "Step4.lp");
#endif

        /*------------------------------------------------------------------*/
        /* Step 5. Add another two constraints and then resolve the problem.*/
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 5. Add another two constraints and then resolve the problem." << std::endl << std::endl;

        int bgn2[] = { 0, 3, 6 };
        int indices2[] =
        {
            0, 1, 3,
            0, 2, 3
        };
        double values2[] =
        {
            1.0, 1.0, -2.0,
            1.0, -2.0, 6.0
        };

        const double lhss2[] = { 0, 1 };
        const double rhss2[] = { 2, MDO_INFINITY };

        MdoExprLinear expr[2];
        for (int i = 0; i < 2; ++i)
        {
            for (int e = bgn2[i]; e < bgn2[i + 1]; ++e)
            {
                expr[i] += values2[e] * x[indices2[e]];
            }
        }

        std::vector<std::reference_wrapper<MdoCons> > new_conss = model.addConss(2, lhss2, rhss2, expr, "NewConss");
        std::cout << conss.size() << std::endl;
        conss.insert(conss.end(), new_conss.begin(), new_conss.end());
        std::cout << conss.size() << std::endl;
        std::cout << x.size() << std::endl;

        /* Solve the problem. */
        model.solveProb();
        model.displayResults();
#ifdef WRITE_LP
        model.writeProb(MY_FOLDER "Step5.lp");
#endif

        /*------------------------------------------------------------------*/
        /* Step 6. Obtain optimal basis. */
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 6. Obtain optimal basis." << std::endl << std::endl;
        /*
         * isFree = 0,
         * basic = 1,
         * atUpperBound = 2,
         * atLowerBound = 3,
         * superBasic = 4,
         * isFixed = 5,
         */
        std::vector<int> col_basis;
        std::vector<int> row_basis;
        for (auto var : x)
        {
            std::cout << "Basis status of variable " << var.get().getIndex() << " is " << var.get().getIntAttr(MDO_INT_ATTR::COL_BASIS) << std::endl;
            col_basis.push_back(var.get().getIntAttr(MDO_INT_ATTR::COL_BASIS));
        }
        for (auto var : y)
        {
            std::cout << "Basis status of variable " << var.get().getIndex() << " is " << var.get().getIntAttr(MDO_INT_ATTR::COL_BASIS) << std::endl;
            col_basis.push_back(var.get().getIntAttr(MDO_INT_ATTR::COL_BASIS));
        }
        for (auto cons : conss)
        {
            std::cout << "Basis status of constraint " << cons.get().getIndex() << " is " << cons.get().getIntAttr(MDO_INT_ATTR::ROW_BASIS) << std::endl;
            row_basis.push_back(cons.get().getIntAttr(MDO_INT_ATTR::ROW_BASIS));
        }
#ifdef WRITE_LP
        model.writeProb(MY_FOLDER "Step6.lp");
        model.writeSoln(MY_FOLDER "Step6.bas");
#endif

        /*------------------------------------------------------------------*/
        /* Step 7. Warm-start Simplex. */
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 7. Warm-start Simplex." << std::endl << std::endl;

        /* Change the objective coefficients. */
        x[1].get().setRealAttr("Obj", 3.0);
        x[2].get().setRealAttr("Obj", -3.0);

        /* Load the basis. */
        model.setIntAttrArray(MDO_INT_ATTR::ROW_BASIS, 0, row_basis.size(), row_basis.data());
        model.setIntAttrArray(MDO_INT_ATTR::COL_BASIS, 0, col_basis.size(), col_basis.data());

        /* Solve the problem. */
        model.setIntParam(MDO_INT_PARAM::METHOD, 0);
        model.solveProb();
        model.displayResults();
#ifdef WRITE_LP
        model.writeProb(MY_FOLDER "Step7.lp");
#endif

        /*------------------------------------------------------------------*/
        /* Step 8. Model query. */
        /*------------------------------------------------------------------*/
        std::cout << std::endl << "Step 8. Model query." << std::endl << std::endl;

        /* Query 1: Retrieve first constraint. */
        std::cout << "Query 1: Retrieve first constraint." << std::endl;

        MdoExprLinear temp_expr;
        temp_expr = model.getExprLinear(conss[0]);
        std::cout << temp_expr << std::endl;

        /* Query 2: Retrieve second column. */
        std::cout << "Query 2: Retrieve second column." << std::endl;

        MdoCol temp_col;
        temp_col = model.getCol(x[1]);
        std::cout << temp_col << std::endl;

    }
    catch (MdoException & e)
    {
        std::cerr << "===================================" << std::endl;
        std::cerr << "Error : code <" << e.getResult() << ">" << std::endl;
        std::cerr << "Reason : " << model.explainResult(e.getResult()) << std::endl;
        std::cerr << "===================================" << std::endl;

        return static_cast<int>(e.getResult());
    }

    return static_cast<int>(MDO_OKAY);
}