I have an Excel workbook that uses the Solver add-in to maximize an objective that contains a square root (i.e. it's non-linear). I'm attempting to re-implement this in C# using Microsoft Solver Foundation. I've tried a few different directives but have not been able to find a solver that reproduces the results I get in Excel.
I tried using Hybrid Local Search, but the results come out all wrong and the resulting maximum doesn't come close to Excel's. If I use the Interior Point Method and remove the square root (from both Excel and C#), I get pretty close to the Excel optimization, but that is of no use to me since I'm trying to match the Excel model that includes the square root. A stripped-down sketch of the two attempts is below.
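Roughly, this is how I'm switching between the two directives (just a sketch; it assumes the model has already been built on the SolverContext exactly as in the Solve() method further down, and both directive types are from Microsoft.SolverFoundation.Services):

    using Microsoft.SolverFoundation.Services;

    // Attempt 1: full goal including Model.Sqrt(...). Hybrid Local Search accepts
    // the non-linear term, but the maximum it reports is nowhere near Excel's.
    static Solution SolveNonLinearGoal(SolverContext context)
    {
        return context.Solve(new HybridLocalSearchDirective());
    }

    // Attempt 2: same model with the Model.Sqrt(...) wrapper removed, so the goal
    // is purely quadratic. The interior point method then matches Excel closely.
    static Solution SolveQuadraticGoal(SolverContext context)
    {
        return context.Solve(new InteriorPointMethodDirective());
    }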
I think the problem with the Hybrid Local Search is that it isn't finding a global maximum. I didn't find any other built-in directives that support NLP.
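The only tuning I can think of on the directive itself is the TimeLimit it inherits from Directive, i.e. letting the local search run longer (sketch below, again assuming the same context and model); as far as I know that still doesn't guarantee a global maximum:

    // Give the hybrid local search more time via the TimeLimit property
    // (milliseconds) inherited from Directive. This only lets it search longer;
    // it does not force a global maximum.
    var directive = new HybridLocalSearchDirective();
    directive.TimeLimit = 60000; // 60 seconds instead of the default
    Solution solution = context.Solve(directive);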
I think Excel Solver uses the GRG2 algorithm. Is there any way I can reproduce the algorithm used by Excel Solver in MSF?
For reference, here's the QP example that ships with MSF, with the changes I've made preceded by the comment '// #######':
public string Solve()
{
    /*****************************
     * Construction of the model *
     *****************************/
    SolverContext context = SolverContext.GetContext();
    // For repeating the solution with other minimum returns
    context.ClearModel();

    // Create an empty model from the context
    Model portfolio = context.CreateModel();

    // Create a string set with stock names
    Set setStocks = new Set(Domain.Any, "Stocks");

    /**** Decisions ****/
    // Create decisions bound to the set. There will be as many decisions as there are values in the set
    Decision allocations = new Decision(Domain.RealNonnegative, "Allocations", setStocks);
    allocations.SetBinding(StocksHistory, "Allocation", "Stock");
    portfolio.AddDecision(allocations);

    /**** Parameters ****/
    // Create parameters bound to the covariance matrix
    Parameter pCovariants = new Parameter(Domain.Real, "Covariants", setStocks, setStocks);
    pCovariants.SetBinding(Covariants, "Variance", "StockI", "StockJ");

    // Create parameters bound to the mean performance of the stocks over a 12-month period
    Parameter pMeans = new Parameter(Domain.Real, "Means", setStocks);
    pMeans.SetBinding(StocksHistory, "Mean", "Stock");
    portfolio.AddParameters(pCovariants, pMeans);

    /**** Constraints ****/
    // The portion of a stock should be between 0 and 1
    portfolio.AddConstraint("portion", Model.ForEach(setStocks, stock => 0 <= allocations[stock] <= 1));
    // The sum of all allocations should be equal to unity
    portfolio.AddConstraint("SumPortions", Model.Sum(Model.ForEach(setStocks, stock => allocations[stock])) == 1);

    /**** Goals ****/
    portfolio.AddGoal("Variance", GoalKind.Maximize,
        // ####### Include an inner product of the means and weights to form the utility curve
        Model.Sum
        (
            Model.ForEach
            (
                setStocks, x =>
                    Model.Product(
                        allocations[x],
                        pMeans[x])
            )
        )
        -
        // ####### Use the square root of the variance (volatility) instead. This makes the problem non-linear
        Model.Sqrt(
            Model.Sum
            (
                Model.ForEach
                (
                    setStocks, stockI =>
                        Model.ForEach
                        (
                            setStocks, stockJ =>
                                Model.Product(pCovariants[stockI, stockJ], allocations[stockI], allocations[stockJ])
                        )
                )
            )
        )
    );

    // ####### remove event handler

    /*******************
     * Solve the model *
     *******************/
    // ####### Use an NLP algorithm directive
    Solution solution = context.Solve(new HybridLocalSearchDirective());

    // ####### remove conditions on propagate
    context.PropagateDecisions();

    // ####### Remove save. Can't save an NLP model
    Report report = solution.GetReport();
    return report.ToString();
}
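For what it's worth, this is how I'd check whether the hybrid local search is only claiming a local optimum and compare its objective value against Excel (a sketch; Quality is a property of Solution, and I'm just printing the goal values):

    Solution solution = context.Solve(new HybridLocalSearchDirective());
    // SolverQuality indicates whether the solver claims a global or only a local optimum.
    Console.WriteLine(solution.Quality);
    // Objective value(s), for comparing directly against the Excel Solver result.
    foreach (Goal goal in solution.Goals)
        Console.WriteLine("{0} = {1}", goal.Name, goal.ToDouble());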