« Previous « Start » Next »
4 NLP Problem
TOMNET requires that general nonlinear problems are defined in .NET
classes. The function to be optimized must always be supplied. It
is recommended that the user supply as many analytical functions
(i.e. the gradient and constraint Jacobian) as possible, since this
greatly increases robustness and results in a faster execution time.
The constrained nonlinear programming problem is defined as:
$$
\begin{array}{llcl}
\min_{x} & f(x) & & \\
\text{s/t} & x_L \le & x & \le x_U \\
 & b_L \le & A x & \le b_U \\
 & c_L \le & c(x) & \le c_U
\end{array}
\qquad (5)
$$
where $x, x_L, x_U \in \mathbb{R}^n$, $f(x) \in \mathbb{R}$,
$A \in \mathbb{R}^{m_1 \times n}$, $b_L, b_U \in \mathbb{R}^{m_1}$ and
$c_L, c(x), c_U \in \mathbb{R}^{m_2}$.
Example problem:
$$
\begin{array}{llcl}
\min_{x} & f(x) = \alpha \,(x_2 - x_1^2)^2 + (1 - x_1)^2 & & \\
\text{s/t} & -10 \le & x_1 & \le 2 \\
 & -10 \le & x_2 & \le 2 \\
 & -\infty \le & x_1 x_2 & \le 0.5 \\
 & \alpha = 100 & &
\end{array}
\qquad (6)
$$
The following code (available in the solution file
quickguide/nlpQG/nlpQG.sln) defines the problem in TOMNET.
using System;
using TOMNET;
namespace TOMNET
{
/// <summary>
/// Class for nonlinear constraints.
/// Inherited from IDConstarints.
/// </summary>
public class nlpConstraints : IDConstraints
{
/// <summary>
/// Nonlinear constraints.
/// </summary>
/// <param name="c">Nonlinear constraint vector.</param>
/// <param name="x">The decision variables.</param>
public void Evaluate(double[] c, double[] x)
{
c[0] = -Math.Pow(x[0], 2) - x[1];
}
/// <summary>
/// Alternative version.
/// </summary>
/// <param name="x">The decision variables.</param>
/// <returns>Nonlinear constraint vector.</returns>
public double[] Evaluate(double[] x)
{
double[] c = new double[1];
this.Evaluate(c, x);
return c;
}
/// <summary>
/// Nonlinear constraint Jacobian (first derivative).
/// </summary>
/// <param name="Jac"></param>
/// <param name="x"></param>
public void dc(Jacobian Jac, double[] x)
{
Jac[0, 0] = -2 * x[0];
Jac[0, 1] = -1;
}
}
/// <summary>
/// Class for nonlinear function.
/// Inherited from IDFunction.
/// </summary>
public class nlpFunction : IDFunction
{
/// <summary>
/// Constant used in objective function and gradient.
/// </summary>
public double alpha;
/// <summary>
/// Constructor for non linear objective function needed
/// since a constant must be set.
/// </summary>
/// <param name="alpha">Parameter alpha used
/// in objective function and gradient.</param>
public nlpFunction(double alpha)
{
this.alpha = alpha;
}
/// <summary>
/// Objective function.
/// </summary>
/// <param name="fx">Value of the objective function.</param>
/// <param name="x">The decision variables.</param>
public void Evaluate(double[] fx, double[] x)
{
fx[0] = alpha * Math.Pow(x[1] - Math.Pow(x[0], 2), 2) +
Math.Pow((1 - x[0]), 2);
}
/// <summary>
/// Gradient (first derivative) for the objective function.
/// </summary>
/// <param name="g">Values of the gradient.</param>
/// <param name="x">The decision variables.</param>
public void Grad(double[] g, double[] x)
{
g[0] = -4 * alpha * x[0] * (x[1] - Math.Pow(x[0], 2)) -
2 * (1 - x[0]);
g[1] = 2 * alpha * (x[1] - Math.Pow(x[0], 2));
}
}
/// <summary>
/// Quick guide class for nonlinear problem (Rosenbrocks Banana).
/// </summary>
public class nlpQG
{
/// <summary>
/// Testprogram for nlpQG.
/// </summary>
static void Main()
{
//
// Constant alpha
//
double alpha = 100.0;
//
// Function and constraints
//
nlpFunction nlpF = new nlpFunction(alpha);
nlpConstraints nlpC = new nlpConstraints();
//
// Starting point, lower and upper bounds
//
double[] x_0 = new double[2] { -1.2, 1.0 };
double[] x_L = new double[2] { -10.0, -10.0 };
double[] x_U = new double[2] { 2.0, 2.0 };
//
// Lower and upper bounds for nonlinear constraints
//
double[] c_L = new double[1] { -1000.0 };
double[] c_U = new double[1] { 0.0 };
//
// Create problem, solver and result.
//
TOMNETProblem Prob = new ConProblem(nlpF, nlpC, c_L, c_U, x_L, x_U,
x_0, "RBB", double.NegativeInfinity, null, null);
SNOPT solver = new SNOPT();
Result result;
//
// Define a print file for SNOPT
//
string printfilename = "SnoptNlpQG.txt";
solver.Options.PrintFile = printfilename;
//
// Solve the problem
//
solver.Solve(Prob, out result);
//
// Get the solution results
//
double[] solution = result.x_k;
double objective = result.f_k;
//
// Display the solution on the console.
//
Console.WriteLine(" * * * * * * * *");
Console.WriteLine(" * Solved " + Prob.Name);
Console.WriteLine(" * Printfile: " + printfilename);
Console.WriteLine(" * Objective: {0}", objective);
Console.WriteLine(" * Solution");
Console.WriteLine(" x_L[i] <= x_k[i] <= x_U[i]");
for (int i = 0; i < Prob.N; i++)
{
Console.WriteLine(" {0,10:g3} <= {1,10:g3} <= {2,10:g3} ",
Prob.x_L[i], solution[i], Prob.x_U[i]);
}
Console.WriteLine(" * Nonlinear constraint");
Console.WriteLine(" c_L[i] <= cx[i] <= c_U[i]");
double[] cx = new double[Prob.mNonLin];
Prob.c(cx, result.x_k);
for (int i = 0; i < Prob.mNonLin; i++)
{
Console.WriteLine(" {0,10:g3} <= {1,10:g3} <= {2,10:g3}",
Prob.c_L[i], cx[i], Prob.c_U[i]);
}
}
}
}
Observe that the code defines four functions that are used in the
callbacks from the solver:
f: Function value
g: Gradient vector
c: Nonlinear constraint vector
dc: Nonlinear constraint gradient matrix
The nlpQG.sln solution can be opened in a suitable editor and compiled
into an executable. When the executable is run from a command prompt,
the following output is displayed:
« Previous « Start » Next »