├── Topologies
│   ├── ext
│   │   └── modularity
│   │       ├── .gitignore
│   │       └── FastCommunity_w_GPL_v1.0.1
│   │           ├── .gitignore
│   │           ├── test2-fc_t2.wpairs
│   │           ├── test1-fc_test1.wpairs
│   │           ├── test1.wpairs
│   │           ├── test2.wpairs
│   │           └── Makefile
│   ├── simple.json
│   ├── bottleneck.json
│   ├── two-srcs.json
│   ├── analyze_topo.py
│   ├── toy-network.json
│   ├── feasible1.json
│   ├── bottleneck-dumbell.json
│   ├── dumbell-bottleneck.json
│   ├── partitioning
│   │   ├── utils.py
│   │   ├── abstract_partitioning_method.py
│   │   ├── leader_election.py
│   │   ├── spectral_clustering.py
│   │   ├── leader_election_uniform.py
│   │   └── fm_partitioning.py
│   ├── toy-network-2.json
│   ├── toy-network-3.json
│   ├── parse_and_convert_graphml.py
│   ├── ring_20.json
│   ├── swan.json
│   ├── abilene.json
│   ├── verify_demand.py
│   ├── b4-teavar.json
│   ├── partition_network.py
│   └── outputs
│       └── paths
│           └── b4-teavar_2.json
├── figures
│   └── metaopt_workflow.png
├── Directory.Build.props
├── .vscode
│   ├── settings.json
│   ├── launch.json
│   └── tasks.json
├── .stylecop
│   ├── AnalysisDictionary.xml
│   ├── stylecop.json
│   └── StyleCop.props
├── MetaOptimize
│   ├── AdversarialGenMethod.cs
│   ├── Exceptions.cs
│   ├── TrafficEngineering
│   │   ├── PathType.cs
│   │   ├── TEOptimizationEncoding.cs
│   │   ├── TEMaxFlowOptimizationSolution.cs
│   │   ├── TEOptimizationSolution.cs
│   │   ├── DemandPinningLinkNegativeException.cs
│   │   ├── AdversarialInputSimplifier.cs
│   │   ├── DemandPinningQuantizedEncoder.cs
│   │   └── ModifiedDemandPinningQuantizedEncoder.cs
│   ├── PIFO
│   │   ├── PIFOOptimizationEncoding.cs
│   │   ├── PIFOOptimizationSolution.cs
│   │   ├── PIFOAvgDelayOptimalEncoder.cs
│   │   ├── SPPIFOAvgDelayEncoder.cs
│   │   ├── PIFOWithDropAvgDelayEncoder.cs
│   │   ├── AIFOAvgDelayEncoder.cs
│   │   ├── ModifiedSPPIFOAvgDelayEncoder.cs
│   │   ├── SPPIFOWithDropAvgDelayEncoder.cs
│   │   ├── PIFOWithDropEncoder.cs
│   │   └── PIFOAdversarialInputGenerator.cs
│   ├── FailureAnalysis
│   │   ├── CapacityAugmentSolution.cs
│   │   ├── FailureAnalysisOptimizationSolution.cs
│   │   └── README.md
│   ├── InnerEncodingMethod.cs
│   ├── VariableType.cs
│   ├── VectorBinPacking
│   │   ├── VBPOptimizationEncoding.cs
│   │   ├── FFDMethod.cs
│   │   ├── VBPOptimizationSolution.cs
│   │   └── Bins.cs
│   ├── MetaOptimize.csproj
│   ├── OptimizationEncoding.cs
│   ├── IList.cs
│   ├── IDemandList.cs
│   ├── GurobiEnvironment.cs
│   ├── OptimizationSolution.cs
│   ├── PathComparer.cs
│   ├── PairwiseDemandList.cs
│   ├── GenericList.cs
│   ├── GenericDemandList.cs
│   ├── GurobiMin.cs
│   ├── GurobiTerminationCallback.cs
│   ├── GurobiTimeoutCallback.cs
│   ├── McCormickRelaxation.cs
│   ├── GurobiStoreProgressCallback.cs
│   ├── GurobiBinary.cs
│   └── GurobiCallback.cs
├── CODE_OF_CONDUCT.md
├── MetaOptimize.Cli
│   ├── MachineStat.cs
│   ├── MetaOptimize.Cli.csproj
│   ├── Parser.cs
│   └── Program.cs
├── .gitattributes
├── MetaOptimize.Test
│   ├── FailureAnalysisGurobiTests.cs
│   ├── PopEncodingTestsORTools.cs
│   ├── DemandPinningGurobiMin.cs
│   ├── PopEncodingTestsGurobiSOS.cs
│   ├── OptimalityGapTestsGurobiSOS.cs
│   ├── PopEncodingTestsGurobi.cs
│   ├── KKtOptimizationTestGurobiMin.cs
│   ├── DemandPinningTestsORTools.cs
│   ├── KktOptimizationTestsGurobiSOS.cs
│   ├── OptimalEncodingTestsGurobiSOS.cs
│   ├── OptimalityGapTestsGurobi.cs
│   ├── PopEncodingTestsGurobiMin.cs
│   ├── DemandPinningTestsGurobiSoS.cs
│   ├── OptimalEncodingGurobiMin.cs
│   ├── OptimalityGapTestGurobiMin.cs
│   ├── OptimalityGapTestsORTools.cs
│   ├── KktOptimizationTestsGurobi.cs
│   ├── OptimalEncodingTestsGurobi.cs
│   ├── PopEncodingTestsGurobiBinary.cs
│   ├── KKTOptimizationTestsORTools.cs
│   ├── OptimalEncodingTestsORTools.cs
│   ├── PopEncodingTestsZen.cs
│   ├── DemandPinningGurobiBinary.cs
│   ├── DemandPinningTestsGurobi.cs
│   ├── KKtOptimizationTestGurobiBinary.cs
│   ├── OptimalEncodingTestsBinary.cs
│   ├── OptimalityGapTestsZen.cs
│   ├── OptimalityGapTestsGurobiBinary.cs
│   ├── KktOptimizationTestsZen.cs
│   ├── DemandPinningTestZen.cs
│   ├── OptimalEncodingTestsZen.cs
│   ├── MetaOptimize.Test.csproj
│   ├── KktOptimizationTests.cs
│   ├── OptimalityGapTests.cs
│   ├── DemandPinningTests.cs
│   ├── TopologyTests.cs
│   └── PopEncodingTests.cs
├── .github
│   └── workflows
│       └── dotnet.yml
├── LICENSE
├── SUPPORT.md
├── Directory.Packages.props
├── SECURITY.md
└── MetaOptimize.sln
/Topologies/ext/modularity/.gitignore:
--------------------------------------------------------------------------------
1 | rundir/
2 |
--------------------------------------------------------------------------------
/Topologies/ext/modularity/FastCommunity_w_GPL_v1.0.1/.gitignore:
--------------------------------------------------------------------------------
1 | FastCommunity_wMH
2 |
--------------------------------------------------------------------------------
/Topologies/ext/modularity/FastCommunity_w_GPL_v1.0.1/test2-fc_t2.wpairs:
--------------------------------------------------------------------------------
1 | 0 2 -0.311111
2 | 2 0 -0.311111
3 |
--------------------------------------------------------------------------------
/Topologies/ext/modularity/FastCommunity_w_GPL_v1.0.1/test1-fc_test1.wpairs:
--------------------------------------------------------------------------------
1 | 2 5 -0.392562
2 | 5 2 -0.392562
3 |
--------------------------------------------------------------------------------
/figures/metaopt_workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/MetaOpt/HEAD/figures/metaopt_workflow.png
--------------------------------------------------------------------------------
/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 | true
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "dotnet.defaultSolution": "MetaOptimize.sln",
3 | "debugpy.debugJustMyCode": false,
4 | "jupyter.debugJustMyCode": false,
5 | "csharp.debug.justMyCode": false
6 | }
--------------------------------------------------------------------------------
/.stylecop/AnalysisDictionary.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | kusto
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Topologies/ext/modularity/FastCommunity_w_GPL_v1.0.1/test1.wpairs:
--------------------------------------------------------------------------------
1 | 0 1 1
2 | 0 3 1
3 | 0 4 1
4 | 1 2 1
5 | 1 4 1
6 | 2 3 1
7 | 2 5 1
8 | 5 6 1
9 | 6 7 1
10 | 5 8 1
11 | 6 7 1
12 | 7 8 1
13 |
--------------------------------------------------------------------------------
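The .wpairs files above are the weighted edge lists consumed by the bundled FastCommunity modularity code: each line is a whitespace-separated "source target weight" triple with integer node ids. The snippet below is only a sketch of how such a file could be produced from a networkx graph; it assumes integer node labels and is not part of the repository.

import networkx as nx

def write_wpairs(G, path, weight="weight", default=1):
    # One "src dst weight" line per edge, mirroring test1.wpairs / test2.wpairs.
    with open(path, "w") as f:
        for u, v, attrs in G.edges(data=True):
            f.write(f"{u} {v} {attrs.get(weight, default)}\n")

# Example: a 5-node ring with unit weights.
write_wpairs(nx.cycle_graph(5), "ring_5.wpairs")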
/Topologies/ext/modularity/FastCommunity_w_GPL_v1.0.1/test2.wpairs:
--------------------------------------------------------------------------------
1 | 0 1 1
2 | 0 3 1
3 | 0 4 1
4 | 1 2 1
5 | 1 4 1
6 | 2 3 1
7 | 2 5 5
8 | 5 6 1
9 | 6 7 1
10 | 5 8 1
11 | 6 7 1
12 | 7 8 1
13 |
--------------------------------------------------------------------------------
/MetaOptimize/AdversarialGenMethod.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// The method for generating adversarial inputs.
5 | ///
6 | public enum AdversarialGenMethodChoice
7 | {
8 | ///
9 | /// use encoding.
10 | ///
11 | Encoding,
12 | }
13 | }
--------------------------------------------------------------------------------
/.stylecop/stylecop.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json",
3 | "description": "Configuration file for StyleCop Analyzers",
4 | "settings": {
5 | "documentationRules": {
6 | "companyName": "Microsoft"
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/MetaOptimize/Exceptions.cs:
--------------------------------------------------------------------------------
1 | using System;
2 |
3 | ///
4 | /// A user-defined exception to throw when the solution is not optimal.
5 | ///
6 | public class InfeasibleOrUnboundSolution : Exception
7 | {
8 | ///
9 | /// The constructor for the exception.
10 | ///
11 | public InfeasibleOrUnboundSolution()
12 | {
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/PathType.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// An enum of path types.
5 | ///
6 | public enum PathType
7 | {
8 | ///
9 | /// K shortest path.
10 | ///
11 | KSP,
12 | ///
13 | /// Predetermined.
14 | ///
15 | Predetermined,
16 | }
17 | }
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Microsoft Open Source Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 |
5 | Resources:
6 |
7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
10 |
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/PIFOOptimizationEncoding.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System.Collections.Generic;
4 |
5 | ///
6 | /// The encoding of PIFO optimization.
7 | ///
8 | public class PIFOOptimizationEncoding : OptimizationEncoding
9 | {
10 | ///
11 | /// Packet ranks.
12 | ///
13 | public IDictionary RankVariables { get; set; }
14 | }
15 | }
--------------------------------------------------------------------------------
/MetaOptimize.Cli/MachineStat.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize.Cli {
2 | using System;
3 | using System.IO;
4 | ///
5 | /// Machine Computation Stat.
6 | ///
7 | public static class MachineStat {
8 | ///
9 | /// num threads.
10 | ///
11 | public static int numThreads = 48;
12 | ///
13 | /// num processors.
14 | ///
15 | public static int numProcessors = 48;
16 | }
17 | }
--------------------------------------------------------------------------------
/MetaOptimize/FailureAnalysis/CapacityAugmentSolution.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System.Collections.Generic;
4 | ///
5 | /// This is the set of links that we augment.
6 | ///
7 | public class CapacityAugmentSolution : OptimizationSolution
8 | {
9 | ///
10 | /// The status of the links (whether we augment or not).
11 | ///
12 | public IDictionary<(string, string), double> LagStatus { get; set; }
13 | }
14 | }
--------------------------------------------------------------------------------
/Topologies/simple.json:
--------------------------------------------------------------------------------
1 | {
2 | "directed": true,
3 | "multigraph": false,
4 | "graph": {},
5 | "nodes": [
6 | { "id": "a" },
7 | { "id": "b" },
8 | { "id": "c" },
9 | { "id": "d" }
10 | ],
11 | "links": [
12 | { "source": "a", "target": "b", "capacity": 10 },
13 | { "source": "a", "target": "c", "capacity": 10 },
14 | { "source": "b", "target": "d", "capacity": 10 },
15 | { "source": "c", "target": "d", "capacity": 10 }
16 | ]
17 | }
--------------------------------------------------------------------------------
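The topology files under Topologies/ (such as simple.json above) are stored in networkx's node-link JSON format, so they can be loaded directly with networkx. A minimal sketch, assuming networkx is installed and the path is relative to the repository root:

import json

from networkx.readwrite import json_graph

# Load a topology stored in node-link format; node_link_graph honors the
# "directed"/"multigraph" flags recorded in the file.
with open("Topologies/simple.json") as f:
    data = json.load(f)
G = json_graph.node_link_graph(data)

# Inspect the per-link capacities used by the topology.
for u, v, attrs in G.edges(data=True):
    print(u, v, attrs.get("capacity"))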
/MetaOptimize/InnerEncodingMethod.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// The method for encoding inner problem.
5 | /// TODO: change the name of the file to match the name of class.
6 | ///
7 | public enum InnerRewriteMethodChoice
8 | {
9 | ///
10 | /// use kkt encoding.
11 | ///
12 | KKT,
13 | ///
14 | /// do the primal dual.
15 | ///
16 | PrimalDual,
17 | }
18 | }
--------------------------------------------------------------------------------
/Topologies/bottleneck.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-2, -2], "id": 1}, {"label": "2", "pos": [2, 2], "id": 2}, {"label": "3", "pos": [2, -2], "id": 3}], "links": [{"capacity": 0.001, "source": 0, "target": 1}, {"capacity": 10.0, "source": 0, "target": 2}, {"capacity": 0.001, "source": 1, "target": 0}, {"capacity": 10.0, "source": 1, "target": 3}, {"capacity": 10.0, "source": 2, "target": 0}, {"capacity": 0.001, "source": 2, "target": 3}, {"capacity": 10.0, "source": 3, "target": 1}, {"capacity": 0.001, "source": 3, "target": 2}]}
--------------------------------------------------------------------------------
/Topologies/two-srcs.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-2, 1], "id": 1}, {"label": "2", "pos": [0, 2], "id": 2}, {"label": "3", "pos": [-1, 0], "id": 3}, {"label": "4", "pos": [1, 0], "id": 4}], "links": [{"source": 0, "target": 2}, {"source": 0, "target": 1}, {"source": 1, "target": 0}, {"source": 1, "target": 3}, {"source": 2, "target": 0}, {"source": 2, "target": 3}, {"source": 2, "target": 4}, {"source": 3, "target": 1}, {"source": 3, "target": 2}, {"source": 3, "target": 4}, {"source": 4, "target": 2}, {"source": 4, "target": 3}]}
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.gz filter=lfs diff=lfs merge=lfs -text
2 | *.dll filter=lfs diff=lfs merge=lfs -text
3 | *.docx filter=lfs diff=lfs merge=lfs -text
4 | *.csv filter=lfs diff=lfs merge=lfs -text
5 | *.zip filter=lfs diff=lfs merge=lfs -text
6 | *.pdf filter=lfs diff=lfs merge=lfs -text
7 | *.pptx filter=lfs diff=lfs merge=lfs -text
8 | *.jpg filter=lfs diff=lfs merge=lfs -text
9 | *.png filter=lfs diff=lfs merge=lfs -text
10 | * !text !filter !merge !diff
11 | MetaOptimize/**/cpu_util_bin_clf_thold_65.json filter=lfs diff=lfs merge=lfs -text
12 | MetaOptimize/**/memory_utilization_train.json filter=lfs diff=lfs merge=lfs -text
13 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/FailureAnalysisGurobiTests.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize.Test
2 | {
3 | using Gurobi;
4 | using Microsoft.VisualStudio.TestTools.UnitTesting;
5 | ///
6 | /// Uses Gurobi to do the tests.
7 | ///
8 | [TestClass]
9 | public class FailureAnalysisGurobiTests : FailureAnalysisBasicTests
10 | {
11 | ///
12 | /// Initialize the test class.
13 | ///
14 | [TestInitialize]
15 | public void Initialize()
16 | {
17 | this.CreateSolver = () => new GurobiSOS();
18 | }
19 | }
20 | }
--------------------------------------------------------------------------------
/MetaOptimize/VariableType.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// variable type.
5 | ///
6 | public enum VariableType
7 | {
8 | ///
9 | /// a continuous variable.
10 | ///
11 | CONTINUOUS,
12 | ///
13 | /// a binary variable.
14 | ///
15 | BINARY,
16 | ///
17 | /// a positive continuous variable.
18 | ///
19 | POSITIVE_CONT,
20 | ///
21 | /// a negative continuous variable.
22 | ///
23 | NEGATIVE_CONT,
24 | }
25 | }
--------------------------------------------------------------------------------
/Topologies/analyze_topo.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import networkx as nx
3 | from collections import defaultdict
4 | import os
5 | import parse_and_convert_graphml
6 | import itertools
7 |
8 |
9 | fname = f'Cogentco.json'
10 | G = parse_and_convert_graphml.read_graph_json(fname)
11 | # print(G)
12 | print(nx.diameter(G))
13 | # link_to_num_flows = defaultdict(int)
14 | # all_pair_sp = dict(nx.all_pairs_shortest_path(G))
15 | # # print(all_pair_sp)
16 | # for (n1, n2) in itertools.permutations(G.nodes(), 2):
17 | # sp = all_pair_sp[n1][n2]
18 | # for (e1, e2) in zip(sp, sp[1:]):
19 | # link_to_num_flows[e1, e2] += 1
20 |
21 | # print(link_to_num_flows)
22 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTestsORTools.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize.Test
2 | {
3 | using Google.OrTools;
4 | using Google.OrTools.LinearSolver;
5 | using Microsoft.VisualStudio.TestTools.UnitTesting;
6 |
7 | ///
8 | /// Tests for the pop encoder.
9 | ///
10 | [TestClass]
11 | public class PopEncodingTestsORTools : PopEncodingTests
12 | {
13 | ///
14 | /// Initialize the test class.
15 | ///
16 | [TestInitialize]
17 | public void Initialize()
18 | {
19 | this.CreateSolver = () => new ORToolsSolver();
20 | }
21 | }
22 | }
--------------------------------------------------------------------------------
/.github/workflows/dotnet.yml:
--------------------------------------------------------------------------------
1 | # This workflow will build a .NET project
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net
3 |
4 | name: .NET
5 |
6 | on:
7 | push:
8 | branches: [ "main" ]
9 | pull_request:
10 | branches: [ "main" ]
11 |
12 | jobs:
13 | build:
14 |
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - uses: actions/checkout@v4
19 | - name: Setup .NET
20 | uses: actions/setup-dotnet@v4
21 | with:
22 | dotnet-version: 6.0.x
23 | - name: Restore dependencies
24 | run: dotnet restore
25 | - name: Build
26 | run: dotnet build --no-restore
27 |
--------------------------------------------------------------------------------
/MetaOptimize/VectorBinPacking/VBPOptimizationEncoding.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// The encoding of an optimization.
11 | ///
12 | public class VBPOptimizationEncoding : OptimizationEncoding
13 | {
14 | ///
15 | /// The variables for each item.
16 | ///
17 | public IDictionary> ItemVariables { get; set; }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/TEOptimizationEncoding.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// The encoding of an optimization.
11 | ///
12 | public class TEOptimizationEncoding : OptimizationEncoding
13 | {
14 | ///
15 | /// The demand expression for any pair of nodes.
16 | ///
17 | public IDictionary<(string, string), Polynomial> DemandVariables { get; set; }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/Topologies/toy-network.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-1, 0], "id": 1}, {"label": "2", "pos": [-2, -2], "id": 2}, {"label": "3", "pos": [2, 2], "id": 3}, {"label": "4", "pos": [1, 0], "id": 4}, {"label": "5", "pos": [2, -2], "id": 5}], "links": [{"source": 0, "target": 3}, {"source": 0, "target": 1}, {"source": 1, "target": 0}, {"source": 1, "target": 4}, {"source": 1, "target": 2}, {"source": 2, "target": 1}, {"source": 2, "target": 5}, {"source": 3, "target": 0}, {"source": 3, "target": 4}, {"source": 4, "target": 1}, {"source": 4, "target": 3}, {"source": 4, "target": 5}, {"source": 5, "target": 2}, {"source": 5, "target": 4}]}
--------------------------------------------------------------------------------
/MetaOptimize/MetaOptimize.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | <_Parameter1>$(MSBuildProjectName)Tests
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/MetaOptimize/OptimizationEncoding.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// The encoding of an optimization.
11 | ///
12 | public class OptimizationEncoding
13 | {
14 | ///
15 | /// The global objective.
16 | ///
17 | public TVar GlobalObjective { get; set; }
18 |
19 | ///
20 | /// The maximization objective.
21 | ///
22 | public Polynomial MaximizationObjective { get; set; }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/DemandPinningGurobiMin.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// uses Gurobi to test demand pinning.
13 | ///
14 | [TestClass]
15 | public class DemandPinningGurobiMin : DemandPinningTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new GurobiMin();
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTestsGurobiSOS.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the pop encoder.
12 | ///
13 | [TestClass]
14 | public class PopEncodingTestsGurobiSOS : PopEncodingTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new GurobiSOS();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTestsGurobiSOS.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the optimality gap.
12 | ///
13 | [TestClass]
14 | public class OptimalityGapTestsGurobiSOS : OptimalityGapTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new GurobiSOS();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTestsGurobi.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the pop encoder.
12 | ///
13 | [TestClass]
14 | [Ignore]
15 | public class PopEncodingTestsGurobi : PopEncodingTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new SolverGurobi();
24 | }
25 | }
26 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/KKtOptimizationTestGurobiMin.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// Tests the GurobiMin version.
13 | ///
14 | [TestClass]
15 | public class KKtOptimizationTestGurobiMin : KktOptimizationTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new GurobiMin();
24 | }
25 | }
26 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/DemandPinningTestsORTools.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Google.OrTools.LinearSolver;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// uses OR-Tools to test demand pinning.
13 | ///
14 | [TestClass]
15 | public class DemandPinningTestsORTools : DemandPinningTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new ORToolsSolver();
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/KktOptimizationTestsGurobiSOS.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Some basic optimization tests.
12 | ///
13 | [TestClass]
14 | public class KktOptimizationTestsGurobiSOS : KktOptimizationTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new GurobiSOS();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalEncodingTestsGurobiSOS.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the optimal encoder.
12 | ///
13 | [TestClass]
14 | public class OptimalEncodingTestsGurobiSOS : OptimalEncodingTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new GurobiSOS();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTestsGurobi.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the optimality gap.
12 | ///
13 | [TestClass]
14 | [Ignore]
15 | public class OptimalityGapTestsGurobi : OptimalityGapTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new SolverGurobi();
24 | }
25 | }
26 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTestsGurobiMin.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Tests for the pop encoder.
14 | ///
15 | [TestClass]
16 | public class PopEncodingTestsGurobiMin : PopEncodingTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new GurobiMin();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/DemandPinningTestsGurobiSoS.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// uses Gurobi to test demand pinning.
13 | ///
14 | [TestClass]
15 | public class DemandPinningTestsGurobiSoS : DemandPinningTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new GurobiSOS();
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalEncodingGurobiMin.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Tests for the optimal encoder.
14 | ///
15 | [TestClass]
16 | public class OptimalEncodingGurobiMin : OptimalEncodingTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new GurobiMin();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTestGurobiMin.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Tests for the optimality gap.
14 | ///
15 | [TestClass]
16 | public class OptimalityGapTestGurobiMin : OptimalityGapTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new GurobiMin();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTestsORTools.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Google.OrTools.LinearSolver;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the optimality gap.
12 | ///
13 | [TestClass]
14 | public class OptimalityGapTestsORTools : OptimalityGapTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new ORToolsSolver();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/KktOptimizationTestsGurobi.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Some basic optimization tests.
12 | ///
13 | [TestClass]
14 | [Ignore]
15 | public class KktOptimizationTestsGurobi : KktOptimizationTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new SolverGurobi();
24 | }
25 | }
26 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalEncodingTestsGurobi.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Gurobi;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the optimal encoder.
12 | ///
13 | [TestClass]
14 | [Ignore]
15 | public class OptimalEncodingTestsGurobi : OptimalEncodingTests
16 | {
17 | ///
18 | /// Initialize the test class.
19 | ///
20 | [TestInitialize]
21 | public void Initialize()
22 | {
23 | this.CreateSolver = () => new SolverGurobi();
24 | }
25 | }
26 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTestsGurobiBinary.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Tests for the pop encoder.
14 | ///
15 | [TestClass]
16 | internal class PopEncodingTestsGurobiBinary : PopEncodingTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new GurobiBinary();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/Topologies/feasible1.json:
--------------------------------------------------------------------------------
1 | {
2 | "directed": true,
3 | "multigraph": false,
4 | "graph": {},
5 | "nodes": [
6 | {"label": "0", "pos": [-2, 2], "id": 0},
7 | {"label": "1", "pos": [-2, -2], "id": 1},
8 | {"label": "2", "pos": [-1, 0], "id": 2},
9 | {"label": "3", "pos": [0, 0], "id": 3},
10 | {"label": "4", "pos": [1, 0], "id": 4},
11 | {"label": "5", "pos": [2, 0], "id": 5},
12 | {"label": "6", "pos": [3, 2], "id": 6},
13 | {"label": "7", "pos": [3, -2], "id": 7}
14 | ],
15 | "links": [
16 | {"source": 0, "target": 2, "capacity": 1},
17 | {"source": 1, "target": 2, "capacity": 5},
18 | {"source": 2, "target": 3},
19 | {"source": 3, "target": 4},
20 | {"source": 4, "target": 5},
21 | {"source": 5, "target": 6, "capacity": 5},
22 | {"source": 5, "target": 7, "capacity": 1}
23 | ]
24 | }
25 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/KKTOptimizationTestsORTools.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Google.OrTools.LinearSolver;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Some basic optimization tests.
12 | ///
13 | [TestClass]
14 | public class KktOptimizationTestsORTools : KktOptimizationTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new ORToolsSolver();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalEncodingTestsORTools.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Google.OrTools.LinearSolver;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Tests for the optimal encoder.
12 | ///
13 | [TestClass]
14 | public class OptimalEncodingTestsORTools : OptimalEncodingTests
15 | {
16 | ///
17 | /// Initialize the test class.
18 | ///
19 | [TestInitialize]
20 | public void Initialize()
21 | {
22 | this.CreateSolver = () => new ORToolsSolver();
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTestsZen.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Microsoft.VisualStudio.TestTools.UnitTesting;
8 | using ZenLib;
9 | using ZenLib.ModelChecking;
10 |
11 | ///
12 | /// Tests for the pop encoder.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class PopEncodingTestsZen : PopEncodingTests<Zen<Real>, ZenSolution>
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new SolverZen();
25 | }
26 | }
27 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/DemandPinningGurobiBinary.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// uses Gurobi to test demand pinning.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class DemandPinningGurobiBinary : DemandPinningTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new GurobiBinary();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/DemandPinningTestsGurobi.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// uses Gurobi to test demand pinning.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class DemandPinningTestsGurobi : DemandPinningTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new SolverGurobi();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/KKtOptimizationTestGurobiBinary.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 | ///
12 | /// Tests the GurobiBinary version.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class KKtOptimizationTestGurobiBinary : PopEncodingTests
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new GurobiBinary();
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalEncodingTestsBinary.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Tests for the optimal encoder.
14 | ///
15 | [TestClass]
16 | [Ignore]
17 | public class OptimalEncodingTestsBinary : OptimalEncodingTests
18 | {
19 | ///
20 | /// Initialize the test class.
21 | ///
22 | [TestInitialize]
23 | public void Initialize()
24 | {
25 | this.CreateSolver = () => new GurobiBinary();
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTestsZen.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Microsoft.VisualStudio.TestTools.UnitTesting;
8 | using ZenLib;
9 | using ZenLib.ModelChecking;
10 |
11 | ///
12 | /// Tests for the optimality gap.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class OptimalityGapTestsZen : OptimalityGapTests<Zen<Real>, ZenSolution>
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new SolverZen();
25 | }
26 | }
27 | }
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/TEMaxFlowOptimizationSolution.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// A solution to an optimization problem.
11 | ///
12 | public class TEMaxFlowOptimizationSolution : TEOptimizationSolution
13 | {
14 | ///
15 | /// The flow allocation for the problem.
16 | ///
17 | public IDictionary<(string, string), double> Flows { get; set; }
18 |
19 | ///
20 | /// The total demand met for each sample.
21 | ///
22 | public IList TotalDemmandMetSample = null;
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTestsGurobiBinary.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Gurobi;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Tests for the optimality gap.
14 | ///
15 | [TestClass]
16 | [Ignore]
17 | public class OptimalityGapTestsGurobiBinary : OptimalityGapTests
18 | {
19 | ///
20 | /// Initialize the test class.
21 | ///
22 | [TestInitialize]
23 | public void Initialize()
24 | {
25 | this.CreateSolver = () => new GurobiBinary();
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/KktOptimizationTestsZen.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Microsoft.VisualStudio.TestTools.UnitTesting;
8 | using ZenLib;
9 | using ZenLib.ModelChecking;
10 |
11 | ///
12 | /// Some basic optimization tests.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class KktOptimizationTestsZen : KktOptimizationTests<Zen<Real>, ZenSolution>
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | public void Initialize()
23 | {
24 | this.CreateSolver = () => new SolverZen();
25 | }
26 | }
27 | }
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/TEOptimizationSolution.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System.Collections.Generic;
4 | ///
5 | /// A solution to a TE optimization problem.
6 | ///
7 | public class TEOptimizationSolution : OptimizationSolution
8 | {
9 | ///
10 | /// The demands for the problem.
11 | ///
12 | public IDictionary<(string, string), double> Demands { get; set; }
13 |
14 | ///
15 | /// The flow path allocation for the problem.
16 | ///
17 | public IDictionary FlowsPaths { get; set; }
18 |
19 | ///
20 | /// The objective by the optimization.
21 | ///
22 | public double MaxObjective { get; set; }
23 | }
24 | }
--------------------------------------------------------------------------------
/MetaOptimize/VectorBinPacking/FFDMethod.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// Different weight-types we can use for the FFD encoding.
5 | ///
6 | public enum FFDMethodChoice
7 | {
8 | ///
9 | /// sequentially place items without sorting.
10 | ///
11 | FF,
12 | ///
13 | /// use the sum over different dimensions to sort items.
14 | ///
15 | FFDSum,
16 | ///
17 | /// use the product of different dimensions to sort items.
18 | ///
19 | FFDProd,
20 | ///
21 | /// use division of the first dimension by the second dimension to sort items (only for two dimensions).
22 | ///
23 | FFDDiv,
24 | }
25 | }
--------------------------------------------------------------------------------
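FFDMethodChoice above names the sort keys used by the first-fit-decreasing (FFD) bin-packing variants: no sorting (FF), sorting items by the sum of their dimensions (FFDSum), by their product (FFDProd), or by the ratio of the first to the second dimension (FFDDiv). The following Python sketch only illustrates those heuristics under assumed semantics; it is not the MetaOptimize implementation.

import math

def first_fit(items, capacity, key=None):
    # items: list of d-dimensional demand tuples; capacity: d-dimensional bin size.
    if key is not None:
        items = sorted(items, key=key, reverse=True)  # "decreasing" order
    bins = []  # remaining capacity of each open bin
    for item in items:
        for i, remaining in enumerate(bins):
            if all(r >= x for r, x in zip(remaining, item)):
                bins[i] = [r - x for r, x in zip(remaining, item)]
                break
        else:
            bins.append([c - x for c, x in zip(capacity, item)])
    return len(bins)

items = [(2, 1), (3, 3), (1, 2), (2, 2)]
print(first_fit(items, (4, 4)))                 # FF: place in arrival order
print(first_fit(items, (4, 4), key=sum))        # FFDSum
print(first_fit(items, (4, 4), key=math.prod))  # FFDProd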
/MetaOptimize.Test/DemandPinningTestZen.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize.Test
8 | {
9 | using Microsoft.VisualStudio.TestTools.UnitTesting;
10 | using ZenLib;
11 | using ZenLib.ModelChecking;
12 |
13 | ///
14 | /// Tests demand pinning.
15 | ///
16 | [TestClass]
17 | [Ignore]
18 | public class DemandPinningTestZen : DemandPinningTests<Zen<Real>, ZenSolution>
19 | {
20 | ///
21 | /// Initialize the test class.
22 | ///
23 | [TestInitialize]
24 | public void Initialize()
25 | {
26 | this.CreateSolver = () => new SolverZen();
27 | }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/MetaOptimize.Cli/MetaOptimize.Cli.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | MetaOptimize.Cli.Program
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalEncodingTestsZen.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using Microsoft.VisualStudio.TestTools.UnitTesting;
8 | using ZenLib;
9 | using ZenLib.ModelChecking;
10 |
11 | ///
12 | /// Tests for the optimal encoder.
13 | ///
14 | [TestClass]
15 | [Ignore]
16 | public class OptimalEncodingTestsZen : OptimalEncodingTests<Zen<Real>, ZenSolution>
17 | {
18 | ///
19 | /// Initialize the test class.
20 | ///
21 | [TestInitialize]
22 | [Ignore]
23 | public void Initialize()
24 | {
25 | this.CreateSolver = () => new SolverZen();
26 | }
27 | }
28 | }
--------------------------------------------------------------------------------
/Topologies/bottleneck-dumbell.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-2, 1.5], "id": 1}, {"label": "2", "pos": [0, 2], "id": 2}, {"label": "3", "pos": [0, 1.5], "id": 3}, {"label": "4", "pos": [-1, 0], "id": 4}, {"label": "5", "pos": [-1, -0.5], "id": 5}, {"label": "6", "pos": [1, 0], "id": 6}, {"label": "7", "pos": [1, -0.5], "id": 7}], "links": [{"source": 0, "target": 1}, {"source": 0, "target": 2}, {"source": 1, "target": 0}, {"source": 2, "target": 3}, {"source": 2, "target": 0}, {"source": 2, "target": 7}, {"source": 3, "target": 2}, {"source": 3, "target": 4}, {"source": 4, "target": 5}, {"source": 4, "target": 3}, {"source": 5, "target": 4}, {"source": 5, "target": 6}, {"source": 6, "target": 7}, {"source": 6, "target": 5}, {"source": 7, "target": 6}, {"source": 7, "target": 2}]}
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/PIFOOptimizationSolution.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System.Collections.Generic;
4 |
5 | ///
6 | /// Solution to PIFO optimization.
7 | ///
8 | public class PIFOOptimizationSolution : OptimizationSolution
9 | {
10 | ///
11 | /// cost of allocation.
12 | ///
13 | public double Cost;
14 |
15 | ///
16 | /// rank of incoming packets.
17 | ///
18 | public IDictionary Ranks;
19 |
20 | ///
21 | /// order of dequeued packets.
22 | ///
23 | public IDictionary Order;
24 |
25 | ///
26 | /// packet admitted or dropped.
27 | ///
28 | public IDictionary Admit;
29 | }
30 | }
--------------------------------------------------------------------------------
/Topologies/dumbell-bottleneck.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-2, 1.5], "id": 1}, {"label": "2", "pos": [0, 2], "id": 2}, {"label": "3", "pos": [0, 1.5], "id": 3}, {"label": "4", "pos": [-1, 1], "id": 4}, {"label": "5", "pos": [-1, 0.5], "id": 5}, {"label": "6", "pos": [1, 0], "id": 6}, {"label": "7", "pos": [1, -0.5], "id": 7}], "links": [{"source": 0, "target": 1}, {"source": 0, "target": 2}, {"source": 1, "target": 0}, {"source": 1, "target": 4}, {"source": 2, "target": 3}, {"source": 2, "target": 0}, {"source": 2, "target": 6}, {"source": 3, "target": 2}, {"source": 3, "target": 4}, {"source": 4, "target": 5}, {"source": 4, "target": 1}, {"source": 4, "target": 3}, {"source": 5, "target": 4}, {"source": 5, "target": 6}, {"source": 6, "target": 7}, {"source": 6, "target": 5}, {"source": 6, "target": 2}, {"source": 7, "target": 6}]}
--------------------------------------------------------------------------------
/Topologies/partitioning/utils.py:
--------------------------------------------------------------------------------
1 | import imp
2 | from itertools import permutations
3 | import networkx as nx
4 | import numpy as np
5 |
6 | def to_np_arr(arr):
7 | return arr if isinstance(arr, np.ndarray) else np.array(arr)
8 |
9 |
10 | def is_partition_valid(G, nodes_in_part):
11 | G_sub = G.subgraph(nodes_in_part)
12 | for src, target in permutations(G_sub.nodes, 2):
13 | if not nx.has_path(G_sub, src, target):
14 | print(src, target)
15 | return False
16 | return True
17 |
18 |
19 | def all_partitions_contiguous(prob, p_v):
20 |
21 | partition_vector = to_np_arr(p_v)
22 | for k in np.unique(partition_vector):
23 | if not is_partition_valid(
24 | prob,
25 | prob.G.subgraph(
26 | np.argwhere(partition_vector == k).flatten())):
27 | print(k)
28 | return False
29 | return True
--------------------------------------------------------------------------------
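is_partition_valid above treats a set of nodes as a valid (contiguous) partition only if every ordered pair of its nodes is reachable inside the induced subgraph, i.e. strong connectivity for directed graphs; all_partitions_contiguous applies that check to every label in a partition vector. A small usage sketch, assuming it is run from Topologies/partitioning so that utils.py is importable:

import networkx as nx
from utils import is_partition_valid

# Directed 6-node ring: 0 -> 1 -> 2 -> 3 -> 4 -> 5 -> 0.
G = nx.cycle_graph(6, create_using=nx.DiGraph)

# {0, 1, 2} is not contiguous: some pairs (e.g. 2 -> 0) are unreachable
# inside the induced subgraph, so the check fails.
print(is_partition_valid(G, [0, 1, 2]))   # False
# The full ring is strongly connected, so it passes.
print(is_partition_valid(G, list(G)))     # True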
/MetaOptimize/PIFO/PIFOAvgDelayOptimalEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// PIFO Optimal Encoder for avg delay.
5 | ///
6 | public class PIFOAvgDelayOptimalEncoder : PIFOOptimalEncoder
7 | {
8 | ///
9 | /// create a new instance.
10 | ///
11 | public PIFOAvgDelayOptimalEncoder(ISolver solver, int NumPackets, int maxRank)
12 | : base(solver, NumPackets, maxRank)
13 | {
14 | }
15 |
16 | ///
17 | /// compute the cost of an ordering of packets.
18 | ///
19 | protected override void ComputeCost()
20 | {
21 | PIFOUtils.ComputeAvgDelayPlacement(this.Solver, this.cost, this.NumPackets, this.MaxRank,
22 | this.placementVariables, this.rankVariables);
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/SPPIFOAvgDelayEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// SPPIFO with avg delay as cost.
5 | ///
6 | public class SPPIFOAvgDelayEncoder : SPPIFOEncoder
7 | {
8 | ///
9 | /// create a new instance.
10 | ///
11 | public SPPIFOAvgDelayEncoder(ISolver solver, int numPackets, int numQueues, int maxRank)
12 | : base(solver, numPackets, numQueues, maxRank)
13 | {
14 | }
15 |
16 | ///
17 | /// compute the cost of an ordering of packets.
18 | ///
19 | protected override void ComputeCost()
20 | {
21 | PIFOUtils.ComputeAvgDelayDequeueAfter(this.Solver, this.cost, this.NumPackets, this.MaxRank,
22 | this.dequeueAfter, this.packetRankVar);
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize/IList.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | // TODO: should we change the name for this class and its function? It seems like it applies more broadly than just for traffic engineering.
6 | namespace MetaOptimize
7 | {
8 | using System;
9 | using System.Collections.Generic;
10 | ///
11 | /// An interface for specifying the demand list for the Primal-Dual encoding.
12 | ///
13 | public interface IList
14 | {
15 | ///
16 | /// The demand list used for a specific pair.
17 | ///
18 | public ISet GetValueForPair(string src, string dst);
19 |
20 | ///
21 | /// Get a random non-zero demand for a specific pair.
22 | ///
23 | public double GetRandomNonZeroValueForPair(Random rng, string src, string dst);
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/MetaOptimize/VectorBinPacking/VBPOptimizationSolution.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// A solution to an optimization problem.
11 | ///
12 | public class VBPOptimizationSolution : OptimizationSolution
13 | {
14 | ///
15 | /// The number of bins used.
16 | ///
17 | public int TotalNumBinsUsed { get; set; }
18 |
19 | ///
20 | /// The demands for the problem.
21 | ///
22 | public IDictionary> Items { get; set; }
23 |
24 | ///
25 | /// The placement of items into bins.
26 | ///
27 | public IDictionary> Placement { get; set; }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/MetaOptimize/IDemandList.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | // TODO: should we change the name for this class and its function? It seems like it applies more broadly than just for traffic engineering.
6 | namespace MetaOptimize
7 | {
8 | using System;
9 | using System.Collections.Generic;
10 | ///
11 | /// An interface for specifying the demand list for the Primal-Dual encoding.
12 | ///
13 | public interface IDemandList
14 | {
15 | ///
16 | /// The demand list used for a specific pair.
17 | ///
18 | public ISet GetDemandsForPair(string src, string dst);
19 |
20 | ///
21 | /// Get a random non-zero demand for a specific pair.
22 | ///
23 | public double GetRandomNonZeroDemandForPair(Random rng, string src, string dst);
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/Topologies/ext/modularity/FastCommunity_w_GPL_v1.0.1/Makefile:
--------------------------------------------------------------------------------
1 | # Aaron Clauset
2 | # Makefile Oct2003
3 | # feel free to hack this to pieces
4 |
5 | #### local macros
6 | # remove without fussing about it
7 | RM = /bin/rm -f
8 |
9 | # compiler name and flags
10 | CCC = g++
11 | CCFLAGS = -O3 -fomit-frame-pointer -funroll-loops -fforce-addr -fexpensive-optimizations -Wno-deprecated
12 |
13 | # loader flags
14 | LDFLAGS =
15 |
16 | ### local program information
17 | EXEC=FastCommunity_wMH
18 | SOURCES= fastcommunity_w_mh.cc
19 |
20 | ### intermediate objects
21 | OBJECTS = $(SOURCES: .cc=.o)
22 |
23 | ### includes
24 | INCLUDES =
25 |
26 | ### headers
27 | HEADERS = maxheap.h vektor.h
28 |
29 | ### targets, dependencies and actions
30 | $(EXEC): $(OBJECTS) Makefile
31 | $(LINK.cc) $(CCFLAGS) -o $(EXEC) $(OBJECTS)
32 |
33 | ### sort out dependencies
34 | depend:
35 | makedepend $(INCLUDES) $(HEADERS) $(SOURCES)
36 |
37 | ### housekeeping
38 |
39 | clean:
40 | $(RM) *.o *~ $(EXEC)
41 |
42 |
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/PIFOWithDropAvgDelayEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// PIFO with limited size queue that drops packets.
5 | ///
6 | public class PIFOWithDropAvgDelayEncoder : PIFOWithDropEncoder
7 | {
8 | ///
9 | /// create a new instance.
10 | ///
11 | public PIFOWithDropAvgDelayEncoder(ISolver solver, int numPackets, int maxRank, int maxQueueSize)
12 | : base(solver, numPackets, maxRank, maxQueueSize)
13 | {
14 | }
15 |
16 | ///
17 | /// compute the cost of an ordering of packets.
18 | ///
19 | protected override void ComputeCost()
20 | {
21 | PIFOUtils.ComputeAvgDelayPlacement(this.Solver, this.cost, this.NumPackets, this.MaxRank,
22 | this.placementVariables, this.rankVariables, this.packetAdmitOrDrop);
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/AIFOAvgDelayEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// AIFO with average delay as cost.
5 | ///
6 | public class AIFOAvgDelayEncoder : AIFOEncoder
7 | {
8 | ///
9 | /// create a new instance.
10 | ///
11 | public AIFOAvgDelayEncoder(ISolver solver, int numPackets, int maxRank, int maxQueueSize,
12 | int windowSize, double burstParam) : base(solver, numPackets, maxRank, maxQueueSize, windowSize, burstParam)
13 | {
14 | }
15 |
16 | ///
17 | /// compute the cost of an ordering of packets.
18 | ///
19 | protected override void ComputeCost()
20 | {
21 | PIFOUtils.ComputeAvgDelayDequeueAfter(this.Solver, this.cost, this.NumPackets, this.MaxRank,
22 | this.dequeueAfter, this.packetRankVar, this.packetAdmitOrDrop);
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/ModifiedSPPIFOAvgDelayEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// modified SP-PIFO with avg delay as cost.
5 | ///
6 | public class ModifiedSPPIFOAvgDelayEncoder : ModifiedSPPIFOEncoder
7 | {
8 | ///
9 | /// The constructor.
10 | ///
11 | public ModifiedSPPIFOAvgDelayEncoder(ISolver solver, int numPackets, int splitQueue, int numQueues, int splitRank, int maxRank)
12 | : base(solver, numPackets, splitQueue, numQueues, splitRank, maxRank)
13 | {
14 | }
15 |
16 | ///
17 | /// compute the cost of an ordering of packets.
18 | ///
19 | protected override void ComputeCost()
20 | {
21 | PIFOUtils.ComputeAvgDelayDequeueAfter(this.Solver, this.cost, this.NumPackets, this.MaxRank,
22 | this.dequeueAfter, this.packetRankVar);
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize/GurobiEnvironment.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.IO;
3 | using Gurobi;
4 |
5 | namespace MetaOptimize
6 | {
7 | internal class GurobiEnvironment
8 | {
9 | private static GRBEnv _env; // 1 instance == 1 license use, max 10 concurrent for all users!
10 | public static GRBEnv Instance
11 | {
12 | get
13 | {
14 | if (_env == null)
15 | {
16 | // for 8.1 and later
17 | _env = new GRBEnv(true);
18 | try
19 | {
20 | _env.Start();
21 | }
22 | catch (GRBException e) when (e.Message.Contains("No Gurobi license found") || e.Message.Contains("Failed to connect"))
23 | {
 24 |                         throw new Exception("Gurobi license error: no valid license was found or the license server could not be reached.", e);
25 | }
26 | }
27 | return _env;
28 | }
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/SPPIFOWithDropAvgDelayEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | ///
4 | /// SP-PIFO with limited size queue that drops packets.
5 | ///
6 | public class SPPIFOWithDropAvgDelayEncoder : SPPIFOWithDropEncoder
7 | {
8 | ///
9 | /// create a new instance.
10 | ///
11 | public SPPIFOWithDropAvgDelayEncoder(ISolver solver, int numPackets,
12 | int numQueues, int maxRank, int totalQueueSize) : base(solver, numPackets, numQueues, maxRank, totalQueueSize)
13 | {
14 | }
15 |
16 | ///
17 | /// compute the cost of an ordering of packets.
18 | ///
19 | protected override void ComputeCost()
20 | {
21 | PIFOUtils.ComputeAvgDelayDequeueAfter(this.Solver, this.cost, this.NumPackets, this.MaxRank,
22 | this.dequeueAfter, this.packetRankVar, this.packetAdmitOrDrop);
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/DemandPinningLinkNegativeException.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using System.Linq;
6 | ///
  7 |     /// Indicates that the pinned demands cause some link capacities to become negative.
8 | ///
9 | [Serializable]
10 | public class DemandPinningLinkNegativeException : Exception
11 | {
12 | ///
 13 |         /// The edge whose remaining capacity is negative.
14 | ///
15 | public (string, string) Edge { get; }
16 | ///
17 | /// DP threshold.
18 | ///
19 | public double Threshold { get; }
20 | ///
 21 |         /// Creates an exception indicating that the pinned demands cause some link capacities to become negative.
22 | ///
23 | public DemandPinningLinkNegativeException(string message, (string, string) edge, double threshold)
24 | : base(message)
25 | {
26 | this.Edge = edge;
27 | this.Threshold = threshold;
28 | }
29 | }
30 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/MetaOptimize.Test.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 | net8.0
4 | false
5 | true
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 | all
14 | runtime; build; native; contentfiles; analyzers; buildtransitive
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | // Use IntelliSense to find out which attributes exist for C# debugging
6 | // Use hover for the description of the existing attributes
7 | // For further information visit https://github.com/dotnet/vscode-csharp/blob/main/debugger-launchjson.md
8 | "name": ".NET Core Launch (console)",
9 | "type": "coreclr",
10 | "request": "launch",
11 | "preLaunchTask": "build",
12 | // If you have changed target frameworks, make sure to update the program path.
13 | // "program": "${workspaceFolder}/MetaOptimize.Test/bin/Debug/net8.0/MetaOptimize.Test.dll",
14 | "program": "${workspaceFolder}/MetaOptimize.Cli/bin/Debug/net8.0/MetaOptimize.Cli.exe",
15 | "args": [],
16 | "cwd": "${workspaceFolder}/MetaOptimize.Cli",
17 | // For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
18 | "console": "internalConsole",
19 | "stopAtEntry": false
20 | },
21 | {
22 | "name": ".NET Core Attach",
23 | "type": "coreclr",
24 | "request": "attach"
25 | }
26 | ]
27 | }
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "2.0.0",
3 | "tasks": [
4 | {
5 | "label": "build",
6 | "command": "dotnet",
7 | "type": "process",
8 | "args": [
9 | "build",
10 | "${workspaceFolder}/MetaOptimize.sln",
11 | "/property:GenerateFullPaths=true",
12 | "/consoleloggerparameters:NoSummary"
13 | ],
14 | "problemMatcher": "$msCompile"
15 | },
16 | {
17 | "label": "publish",
18 | "command": "dotnet",
19 | "type": "process",
20 | "args": [
21 | "publish",
22 | "${workspaceFolder}/MetaOptimize.sln",
23 | "/property:GenerateFullPaths=true",
24 | "/consoleloggerparameters:NoSummary"
25 | ],
26 | "problemMatcher": "$msCompile"
27 | },
28 | {
29 | "label": "watch",
30 | "command": "dotnet",
31 | "type": "process",
32 | "args": [
33 | "watch",
34 | "run",
35 | "--project",
36 | "${workspaceFolder}/MetaOptimize.sln"
37 | ],
38 | "problemMatcher": "$msCompile"
39 | }
40 | ]
41 | }
--------------------------------------------------------------------------------
/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # TODO: The maintainer of this repo has not yet edited this file
2 |
3 | **REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project?
4 |
5 | - **No CSS support:** Fill out this template with information about how to file issues and get help.
6 | - **Yes CSS support:** Fill out an intake form at [aka.ms/onboardsupport](https://aka.ms/onboardsupport). CSS will work with/help you to determine next steps.
7 | - **Not sure?** Fill out an intake as though the answer were "Yes". CSS will help you decide.
8 |
9 | *Then remove this first heading from this SUPPORT.MD file before publishing your repo.*
10 |
11 | # Support
12 |
13 | ## How to file issues and get help
14 |
15 | This project uses GitHub Issues to track bugs and feature requests. Please search the existing
16 | issues before filing new issues to avoid duplicates. For new issues, file your bug or
17 | feature request as a new Issue.
18 |
19 | For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE
20 | FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER
21 | CHANNEL. WHERE WILL YOU HELP PEOPLE?**.
22 |
23 | ## Microsoft Support Policy
24 |
25 | Support for this **PROJECT or PRODUCT** is limited to the resources listed above.
26 |
--------------------------------------------------------------------------------
/MetaOptimize/OptimizationSolution.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 | // TODO: remove?
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// A solution to an optimization problem.
11 | ///
12 | public class OptimizationSolution
13 | {
14 | // ///
15 | // /// The total demand met by the optimization.
16 | // ///
17 | // public double TotalDemandMet { get; set; }
18 |
19 | // ///
20 | // /// The demands for the problem.
21 | // ///
22 | // public IDictionary<(string, string), double> Demands { get; set; }
23 |
24 | // ///
25 | // /// The flow allocation for the problem.
26 | // ///
27 | // public IDictionary<(string, string), double> Flows { get; set; }
28 |
29 | // ///
30 | // /// The flow path allocation for the problem.
31 | // ///
32 | // public IDictionary FlowsPaths { get; set; }
33 |
34 | // ///
35 | // /// Each sample total demand.
36 | // ///
37 | // public IList TotalDemmandMetSample = null;
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/MetaOptimize/FailureAnalysis/FailureAnalysisOptimizationSolution.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System.Collections.Generic;
4 | ///
  5 |     /// A solution to the failure analysis optimization problem.
6 | ///
7 | public class FailureAnalysisOptimizationSolution : OptimizationSolution
8 | {
9 | ///
10 | /// The demands for the problem.
11 | ///
12 | public IDictionary<(string, string), double> Demands { get; set; }
13 | ///
14 | /// The up/down status for the links.
15 | ///
16 | public IDictionary LagStatus { get; set; }
17 |
18 | ///
19 | /// The flow allocation for the problem.
20 | ///
21 | public IDictionary<(string, string), double> Flows { get; set; }
22 |
23 | ///
24 | /// The flow path allocation for the problem.
25 | ///
26 | public IDictionary FlowsPaths { get; set; }
27 |
28 | ///
29 | /// The total flow on each link.
30 | ///
31 | public IDictionary LagFlows { get; set; }
32 |
33 | ///
 34 |         /// The objective value achieved by the optimization.
35 | ///
36 | public double MaxObjective { get; set; }
37 | }
38 | }
--------------------------------------------------------------------------------
/.stylecop/StyleCop.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | False
6 |
7 |
8 | all
9 | runtime; build; native; contentfiles; analyzers; buildtransitive
10 |
11 |
12 |
13 |
14 | False
15 |
16 |
17 |
18 |
19 | ..\.stylecop\stylecop.ruleset
20 | true
21 |
22 |
26 | $(OutputPath)\$(AssemblyName).xml
27 | bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml
28 |
29 |
30 |
--------------------------------------------------------------------------------
/Topologies/toy-network-2.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-1, 0], "id": 1}, {"label": "2", "pos": [-2, -2], "id": 2}, {"label": "3", "pos": [2, 2], "id": 3}, {"label": "4", "pos": [1, 0], "id": 4}, {"label": "5", "pos": [2, -2], "id": 5}, {"label": "6", "pos": [-2, -4], "id": 6}, {"label": "7", "pos": [-1, -5], "id": 7}, {"label": "8", "pos": [-2, -6], "id": 8}, {"label": "9", "pos": [2, -4], "id": 9}, {"label": "10", "pos": [1, -5], "id": 10}, {"label": "11", "pos": [2, -6], "id": 11}], "links": [{"source": 0, "target": 1}, {"source": 0, "target": 3}, {"source": 1, "target": 0}, {"source": 1, "target": 2}, {"source": 1, "target": 4}, {"source": 1, "target": 7}, {"source": 2, "target": 1}, {"source": 2, "target": 5}, {"source": 2, "target": 6}, {"source": 3, "target": 0}, {"source": 3, "target": 4}, {"source": 4, "target": 1}, {"source": 4, "target": 3}, {"source": 4, "target": 5}, {"source": 4, "target": 10}, {"source": 5, "target": 2}, {"source": 5, "target": 4}, {"source": 5, "target": 9}, {"source": 6, "target": 7}, {"source": 6, "target": 9}, {"source": 6, "target": 2}, {"source": 7, "target": 6}, {"source": 7, "target": 8}, {"source": 7, "target": 10}, {"source": 7, "target": 1}, {"source": 8, "target": 7}, {"source": 8, "target": 11}, {"source": 9, "target": 6}, {"source": 9, "target": 10}, {"source": 9, "target": 5}, {"source": 10, "target": 7}, {"source": 10, "target": 9}, {"source": 10, "target": 11}, {"source": 10, "target": 4}, {"source": 11, "target": 8}, {"source": 11, "target": 10}]}
--------------------------------------------------------------------------------
/Topologies/toy-network-3.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "pos": [-2, 2], "id": 0}, {"label": "1", "pos": [-1, 0], "id": 1}, {"label": "2", "pos": [-2, -2], "id": 2}, {"label": "3", "pos": [2, 2], "id": 3}, {"label": "4", "pos": [1, 0], "id": 4}, {"label": "5", "pos": [2, -2], "id": 5}, {"label": "6", "pos": [-2, -4], "id": 6}, {"label": "7", "pos": [-1, -5], "id": 7}, {"label": "8", "pos": [-2, -6], "id": 8}, {"label": "9", "pos": [2, -4], "id": 9}, {"label": "10", "pos": [1, -5], "id": 10}, {"label": "11", "pos": [2, -6], "id": 11}], "links": [{"source": 0, "target": 1}, {"source": 0, "target": 3}, {"source": 1, "target": 0}, {"source": 1, "target": 2}, {"source": 1, "target": 4}, {"source": 1, "target": 7}, {"source": 2, "target": 1}, {"source": 2, "target": 5}, {"source": 2, "target": 6}, {"source": 3, "target": 0}, {"source": 3, "target": 4}, {"source": 4, "target": 1}, {"source": 4, "target": 3}, {"source": 4, "target": 5}, {"source": 4, "target": 10}, {"source": 5, "target": 2}, {"source": 5, "target": 4}, {"source": 5, "target": 9}, {"source": 6, "target": 7}, {"source": 6, "target": 9}, {"source": 6, "target": 2}, {"source": 7, "target": 6}, {"source": 7, "target": 8}, {"source": 7, "target": 10}, {"source": 7, "target": 1}, {"source": 8, "target": 7}, {"source": 8, "target": 11}, {"source": 9, "target": 6}, {"source": 9, "target": 10}, {"source": 9, "target": 5}, {"source": 10, "target": 7}, {"source": 10, "target": 9}, {"source": 10, "target": 11}, {"source": 10, "target": 4}, {"source": 11, "target": 8}, {"source": 11, "target": 10}]}
--------------------------------------------------------------------------------
/MetaOptimize/PathComparer.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System.Collections.Generic;
8 |
9 | ///
10 | /// A custom path comparer.
11 | ///
 12 |     public class PathComparer : IEqualityComparer<string[]>
13 | {
14 | ///
15 | /// Equality between paths.
16 | ///
17 | /// The first path.
18 | /// The second path.
19 | /// True or false.
20 | public bool Equals(string[] x, string[] y)
21 | {
22 | if (x.Length != y.Length)
23 | {
24 | return false;
25 | }
26 |
27 | for (int i = 0; i < x.Length; i++)
28 | {
29 | if (x[i] != y[i])
30 | {
31 | return false;
32 | }
33 | }
34 |
35 | return true;
36 | }
37 |
38 | ///
39 | /// Hashcode for a path.
40 | ///
41 | /// The path.
42 | /// An int hashcode.
43 | public int GetHashCode(string[] obj)
44 | {
45 | int hash = 7;
46 | foreach (string x in obj)
47 | {
48 | hash = hash * 31 + x.GetHashCode();
49 | }
50 |
51 | return hash;
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/MetaOptimize/PairwiseDemandList.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 | using ZenLib;
11 |
12 | ///
 13 |     /// A demand list that specifies the demand quantization levels for every pair.
14 | ///
15 | public class PairwiseDemandList : IList
16 | {
17 | ///
 18 |         /// The demand quantization levels.
19 | ///
 20 |         private IDictionary<(string, string), ISet<double>> demandList;
21 |
22 | ///
 23 |         /// The class constructor.
24 | ///
 25 |         public PairwiseDemandList(IDictionary<(string, string), ISet<double>> demandList)
26 | {
27 | this.demandList = demandList;
28 | }
29 |
30 | ///
 31 |         /// The demand list used for a specific pair.
32 | ///
 33 |         public ISet<double> GetValueForPair(string src, string dst) {
34 | return this.demandList[(src, dst)];
35 | }
36 |
37 | ///
 38 |         /// Gets a random non-zero demand for a specific pair.
39 | ///
40 | public double GetRandomNonZeroValueForPair(Random rng, string src, string dst) {
 41 |             var demandlvls = new HashSet<double>(this.GetValueForPair(src, dst));
42 | demandlvls.Remove(0);
43 | var demand = demandlvls.ToList()[rng.Next(demandlvls.Count())];
44 | return demand;
45 | }
46 | }
47 | }
--------------------------------------------------------------------------------
/MetaOptimize.Cli/Parser.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Cli
6 | {
7 | using MetaOptimize;
8 | using Newtonsoft.Json.Linq;
9 |
10 | ///
11 | /// Parsing helper functions.
12 | ///
 13 |     /// TODO: it would be good to add a debug mode instead of using Console.WriteLine.
14 | public static class Parser
15 | {
16 | ///
17 | /// Reads a topology in JSON format.
18 | ///
19 | /// The file path.
20 | /// The path to the file that contains the paths in the topology.
21 | /// To scale topology capacities.
22 | /// The topology from the file.
23 | public static Topology ReadTopologyJson(string filePath, string pathPath = null, double scaleFactor = 1)
24 | {
25 | var text = File.ReadAllText(filePath);
26 | var obj = (dynamic)JObject.Parse(text);
27 | var nodes = obj.nodes;
28 | var edges = obj.links;
29 | Console.WriteLine("======= " + pathPath);
30 | var topology = new Topology(pathPath);
31 | foreach (var node in nodes)
32 | {
33 | topology.AddNode(node.id.ToString());
34 | }
35 |
36 | foreach (var edge in edges)
37 | {
38 | // TODO: the number of decimal points should be part of a config file.
39 | double capacity = Math.Round((double)edge.capacity * scaleFactor, 4);
40 | // Console.WriteLine(capacity);
41 | topology.AddEdge(edge.source.ToString(), edge.target.ToString(), capacity);
42 | }
43 | return topology;
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/MetaOptimize/GenericList.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 |
11 | ///
 12 |     /// A demand list that specifies the same demand quantization levels for every pair.
13 | ///
14 | public class GenericList : IList
15 | {
16 | ///
 17 |         /// The demand quantization levels.
18 | ///
 19 |         public ISet<double> List;
20 |
21 | ///
 22 |         /// The class constructor.
23 | ///
 24 |         public GenericList(ISet<double> inputList)
25 | {
26 | this.List = inputList;
27 | }
28 |
29 | ///
 30 |         /// The demand list used for a specific pair.
31 | ///
 32 |         public ISet<double> GetValueForPair(string src, string dst) {
33 | return this.List;
34 | }
35 |
36 | ///
 37 |         /// Gets a random non-zero demand for a specific pair.
38 | ///
39 | public double GetRandomNonZeroValueForPair(Random rng, string src, string dst) {
 40 |             var demandlvls = new HashSet<double>(this.GetValueForPair(src, dst));
41 | demandlvls.Remove(0);
42 | var demand = demandlvls.ToList()[rng.Next(demandlvls.Count())];
43 | return demand;
44 | }
45 |
46 | ///
 47 |         /// Gets a random demand (possibly zero) for a specific pair.
48 | ///
49 | public double GetRandomValueForPair(Random rng, string src, string dst) {
 50 |             var lvls = new HashSet<double>(this.GetValueForPair(src, dst));
51 | lvls.Add(0);
52 | var value = lvls.ToList()[rng.Next(lvls.Count())];
53 | return value;
54 | }
55 | }
56 | }
--------------------------------------------------------------------------------
/Topologies/parse_and_convert_graphml.py:
--------------------------------------------------------------------------------
1 | import json
2 | import numpy as np
3 | from networks import ring_topology
4 | import networkx as nx
5 | from networkx.readwrite import json_graph
6 |
7 | def read_graph_graphml(fname):
8 | assert fname.endswith('.graphml')
9 | file_G = nx.read_graphml(fname).to_directed()
10 | if isinstance(file_G, nx.MultiDiGraph):
11 | file_G = nx.DiGraph(file_G)
12 | G = []
13 | for scc_ids in nx.strongly_connected_components(file_G):
14 | scc = file_G.subgraph(scc_ids)
15 | if len(scc) > len(G):
16 | print("len is: " + str(len(scc)))
17 | G = scc
18 | G = nx.convert_node_labels_to_integers(G)
19 | for u,v in G.edges():
20 | G[u][v]['capacity'] = 1000.0
21 | return G
22 |
23 | def write_graph_json(G: nx.Graph, fname):
24 | assert fname.endswith('json')
25 | with open(fname, 'w') as w:
26 | json.dump(json_graph.node_link_data(G), w)
27 |
28 |
29 | def read_graph_json(fname):
30 | assert fname.endswith('json')
31 | with open(fname, "r") as fp:
32 | node_link_data = json.load(fp)
33 | G = json_graph.node_link_graph(node_link_data)
34 | return G
35 |
36 |
37 |
38 | # # # print("Hi")
39 | # topo_name_list = [
40 | # # "GtsCe",
41 | # # "Cogentco",
42 | # # "Kdl",
43 | # "Uninett2010",
44 | # ]
45 | # for topo_name in topo_name_list:
46 | # fname = f'../../../ncflow/topologies/topology-zoo/{topo_name}.graphml'
47 | # G = read_graph_graphml(fname)
48 | # fname = f'./{topo_name}.json'
49 | # write_graph_json(G, fname)
50 |
51 | # num_nodes_list = [
52 | # 20,
53 | # # 200,
54 | # # 400,
55 | # ]
56 | # for num_nodes in num_nodes_list:
57 | # fname = f"./ring_{num_nodes}.json"
58 | # G = ring_topology(num_nodes=num_nodes, cap=1000)
59 | # # for edge in G.edges:
60 | # # print(edge)
61 | # write_graph_json(G, fname=fname)
62 |
63 |
64 | # G = read_graph_json("../Topologies/b4-teavar.json")
65 |
66 | # print(nx.diameter(G))
67 |
--------------------------------------------------------------------------------
/MetaOptimize/GenericDemandList.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Collections.Generic;
9 | using System.Linq;
10 |
11 | ///
 12 |     /// A demand list that specifies the same demand quantization levels for every pair.
13 | ///
14 | public class GenericDemandList : IDemandList
15 | {
16 | ///
 17 |         /// The demand quantization levels.
18 | ///
 19 |         public ISet<double> demandList;
20 |
21 | ///
 22 |         /// The class constructor.
23 | ///
 24 |         public GenericDemandList(ISet<double> demandList)
25 | {
26 | this.demandList = demandList;
27 | }
28 |
29 | ///
 30 |         /// The demand list used for a specific pair.
31 | ///
 32 |         public ISet<double> GetDemandsForPair(string src, string dst) {
33 | return this.demandList;
34 | }
35 |
36 | ///
 37 |         /// Gets a random non-zero demand for a specific pair.
38 | ///
39 | public double GetRandomNonZeroDemandForPair(Random rng, string src, string dst) {
 40 |             var demandlvls = new HashSet<double>(this.GetDemandsForPair(src, dst));
41 | demandlvls.Remove(0);
42 | var demand = demandlvls.ToList()[rng.Next(demandlvls.Count())];
43 | return demand;
44 | }
45 |
46 | ///
 47 |         /// Gets a random demand (possibly zero) for a specific pair.
48 | ///
49 | public double GetRandomDemandForPair(Random rng, string src, string dst) {
 50 |             var demandlvls = new HashSet<double>(this.GetDemandsForPair(src, dst));
51 | demandlvls.Add(0);
52 | var demand = demandlvls.ToList()[rng.Next(demandlvls.Count())];
53 | return demand;
54 | }
55 | }
56 | }
--------------------------------------------------------------------------------
/Directory.Packages.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | strict
5 | disable
6 | 9999
7 | preview
8 | enable
9 | net8.0
10 | True
11 | true
12 |
13 |
14 | x64
15 |
16 | Microsoft
17 | © Microsoft Corporation. All rights reserved.
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/Topologies/ring_20.json:
--------------------------------------------------------------------------------
1 | {"directed": true, "multigraph": false, "graph": {}, "nodes": [{"label": "0", "id": 0}, {"label": "1", "id": 1}, {"label": "2", "id": 2}, {"label": "3", "id": 3}, {"label": "4", "id": 4}, {"label": "5", "id": 5}, {"label": "6", "id": 6}, {"label": "7", "id": 7}, {"label": "8", "id": 8}, {"label": "9", "id": 9}, {"label": "10", "id": 10}, {"label": "11", "id": 11}, {"label": "12", "id": 12}, {"label": "13", "id": 13}, {"label": "14", "id": 14}, {"label": "15", "id": 15}, {"label": "16", "id": 16}, {"label": "17", "id": 17}, {"label": "18", "id": 18}, {"label": "19", "id": 19}], "links": [{"capacity": 1000, "source": 0, "target": 1}, {"capacity": 1000, "source": 0, "target": 19}, {"capacity": 1000, "source": 1, "target": 0}, {"capacity": 1000, "source": 1, "target": 2}, {"capacity": 1000, "source": 2, "target": 1}, {"capacity": 1000, "source": 2, "target": 3}, {"capacity": 1000, "source": 3, "target": 2}, {"capacity": 1000, "source": 3, "target": 4}, {"capacity": 1000, "source": 4, "target": 3}, {"capacity": 1000, "source": 4, "target": 5}, {"capacity": 1000, "source": 5, "target": 4}, {"capacity": 1000, "source": 5, "target": 6}, {"capacity": 1000, "source": 6, "target": 5}, {"capacity": 1000, "source": 6, "target": 7}, {"capacity": 1000, "source": 7, "target": 6}, {"capacity": 1000, "source": 7, "target": 8}, {"capacity": 1000, "source": 8, "target": 7}, {"capacity": 1000, "source": 8, "target": 9}, {"capacity": 1000, "source": 9, "target": 8}, {"capacity": 1000, "source": 9, "target": 10}, {"capacity": 1000, "source": 10, "target": 9}, {"capacity": 1000, "source": 10, "target": 11}, {"capacity": 1000, "source": 11, "target": 10}, {"capacity": 1000, "source": 11, "target": 12}, {"capacity": 1000, "source": 12, "target": 11}, {"capacity": 1000, "source": 12, "target": 13}, {"capacity": 1000, "source": 13, "target": 12}, {"capacity": 1000, "source": 13, "target": 14}, {"capacity": 1000, "source": 14, "target": 13}, {"capacity": 1000, "source": 14, "target": 15}, {"capacity": 1000, "source": 15, "target": 14}, {"capacity": 1000, "source": 15, "target": 16}, {"capacity": 1000, "source": 16, "target": 15}, {"capacity": 1000, "source": 16, "target": 17}, {"capacity": 1000, "source": 17, "target": 16}, {"capacity": 1000, "source": 17, "target": 18}, {"capacity": 1000, "source": 18, "target": 17}, {"capacity": 1000, "source": 18, "target": 19}, {"capacity": 1000, "source": 19, "target": 18}, {"capacity": 1000, "source": 19, "target": 0}]}
--------------------------------------------------------------------------------
/Topologies/partitioning/abstract_partitioning_method.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | class AbstractPartitioningMethod(object):
5 |
6 | def __init__(self, *, num_partitions=None, weighted=True):
7 | if isinstance(num_partitions, int):
8 | self._num_partitions = num_partitions
9 |
10 | self._use_cache = True
11 | self._weighted = weighted
12 |
13 | self._best_partitions = {
14 | }
15 |
16 | @property
17 | def name(self):
18 | raise Exception("not implemented")
19 |
20 | @property
21 | def use_cache(self):
22 | return self._use_cache
23 |
24 | @use_cache.setter
25 | def use_cache(self, use_cache):
26 | self._use_cache = use_cache
27 |
28 | @property
29 | def G(self):
30 | return self._G
31 |
32 | @property
33 | def partition_vector(self):
34 | return self._partition_vector
35 |
36 | @property
37 | def size_of_largest_partition(self):
38 | counts = np.bincount(self._partition_vector)
39 | return counts[np.argmax(counts)]
40 |
41 | @property
42 | def largest_partition_index(self):
43 | counts = np.bincount(self._partition_vector)
44 | return np.argmax(counts)
45 |
46 | @property
47 | def num_partitions(self):
48 | if not hasattr(self, '_num_partitions'):
49 | return -1
50 | return self._num_partitions
51 |
52 | @property
53 | def weighted(self):
54 | return self._weighted
55 |
56 | # Private method #
57 | def _default_num_partitions(self, G):
58 | return int(np.sqrt(len(G.nodes)))
59 |
60 | def partition(self, G, topo_name, override_cache=False):
61 | if not override_cache and self._use_cache and G.name in self._best_partitions:
62 | return self._best_partitions[G.name]
63 |
64 | self._partition_vector = self._partition_impl(G, topo_name)
65 | return self._partition_vector
66 |
67 | #################
68 | # Public method #
69 | #################
70 | @property
71 | def name(self):
72 | raise NotImplementedError(
73 | 'name needs to be implemented in the subclass: {}'.format(
74 | self.__class__))
75 |
76 | def _partition_impl(self, G, topo_name):
77 | raise NotImplementedError(
78 | '_partition_impl needs to be implemented in the subclass: {}'.format(
79 | self.__class__))
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/PIFOWithDropEncoder.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using Gurobi;
6 |
7 | ///
8 | /// PIFO with limited queue size.
9 | ///
10 | public class PIFOWithDropEncoder : PIFOOptimalEncoder
11 | {
12 | ///
13 | /// queue size.
14 | ///
15 | public int MaxQueueSize { get; set; }
16 |
17 | ///
18 | /// = 1 if packet admitted to the queue. = 0 if dropped.
19 | ///
20 | public IDictionary packetAdmitOrDrop { get; set; }
21 |
22 | ///
23 | /// The constructor.
24 | ///
25 | public PIFOWithDropEncoder(ISolver solver, int NumPackets, int maxRank, int maxQueueSize)
26 | : base(solver, NumPackets, maxRank)
27 | {
28 | this.MaxQueueSize = maxQueueSize;
29 | }
30 |
31 | ///
32 | /// create additional variables.
33 | ///
34 | protected override void CreateAdditionalVariables()
35 | {
36 | packetAdmitOrDrop = new Dictionary();
37 | for (int pid = 0; pid < this.NumPackets; pid++) {
38 | packetAdmitOrDrop[pid] = this.Solver.CreateVariable("admit_" + pid, GRB.BINARY);
39 | }
40 | }
41 |
42 | private void EnforceQueueSize()
43 | {
44 | var capConstr = new Polynomial(new Term(-this.MaxQueueSize));
45 | for (int pid = 0; pid < this.NumPackets; pid++) {
46 | capConstr.Add(new Term(1, this.packetAdmitOrDrop[pid]));
47 | }
48 | this.Solver.AddLeqZeroConstraint(capConstr);
49 | }
50 |
51 | ///
52 | /// additional constraints for the modified variants.
53 | ///
54 | protected override void AddOtherConstraints()
55 | {
56 | this.EnforceQueueSize();
57 | }
58 |
59 | ///
60 | /// return whether packet is admitted to the queue.
61 | ///
62 | protected override int GetAdmitSolution(TSolution solution, int packetID)
63 | {
64 | return Convert.ToInt32(this.Solver.GetVariable(solution, this.packetAdmitOrDrop[packetID]));
65 | }
66 | }
67 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/KktOptimizationTests.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using System;
8 | using System.Collections.Generic;
9 | using Microsoft.VisualStudio.TestTools.UnitTesting;
10 |
11 | ///
12 | /// Some basic optimization tests.
13 | ///
14 | [TestClass]
15 | public class KktOptimizationTests
16 | {
17 | ///
18 | /// Function to create a new solver.
19 | ///
20 | internal Func> CreateSolver;
21 |
22 | ///
23 | /// Test that maximization works via the kkt conditions.
24 | ///
25 | [TestMethod]
26 | public void TestMaximizeKkt()
27 | {
28 | // Choose Solver and initialize variables.
29 |
30 | var solver = CreateSolver();
31 | var x = solver.CreateVariable("x");
32 | var y = solver.CreateVariable("y");
33 | var encoder = new KKTRewriteGenerator(solver, new HashSet() { x, y }, new HashSet());
34 |
35 | // x + 2y == 10
36 | encoder.AddEqZeroConstraint(new Polynomial(new Term(1, x), new Term(2, y), new Term(-10)));
37 |
38 | // x >= 0, y>= 0
39 | encoder.AddLeqZeroConstraint(new Polynomial(new Term(-1, x)));
40 | encoder.AddLeqZeroConstraint(new Polynomial(new Term(-1, y)));
41 |
42 | var obj = solver.CreateVariable("objective");
43 | encoder.AddLeqZeroConstraint(new Polynomial(new Term(1, obj), new Term(-100)));
44 |
45 | // maximize y - x
46 | encoder.AddMaximizationConstraints(new Polynomial(new Term(1, y), new Term(-1, x)));
47 |
48 | var solution = solver.Maximize(obj);
49 | solver.GetVariable(solution, x);
50 |
51 | Assert.IsTrue(Utils.IsApproximately(0, solver.GetVariable(solution, x)));
52 | Assert.IsTrue(Utils.IsApproximately(5, solver.GetVariable(solution, y)));
53 | }
54 |
55 | ///
56 | /// Test that maximization works via the kkt conditions.
57 | ///
58 | [TestMethod]
59 | public void TestMaximizeKkt2()
60 | {
61 | }
62 | }
63 | }
--------------------------------------------------------------------------------
/MetaOptimize/GurobiMin.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using Gurobi;
5 |
6 | namespace MetaOptimize
7 | {
8 | ///
9 | /// Uses the min of two positive
10 | /// functions instead of SoS variables.
11 | ///
12 | public class GurobiMin : GurobiSOS
13 | {
14 | ///
15 | /// Ensure at least one of these terms is zero.
16 | ///
17 | ///
18 | ///
19 | public override void AddOrEqZeroConstraint(Polynomial polynomial1, Polynomial polynomial2)
20 | {
21 | this.AddOrEqZeroConstraint(this.fromPolyToLinExpr(polynomial1), this.fromPolyToLinExpr(polynomial2.Negate()));
22 | }
23 | ///
 24 |         /// Overrides the method in NoParams.
25 | ///
26 | ///
27 | ///
28 | public void AddOrEqZeroConstraint(GRBLinExpr expr1, GRBLinExpr expr2)
29 | {
 30 |             // Create an auxiliary variable for each polynomial.
31 | var var_1 = this._model.AddVar(Double.NegativeInfinity, Double.PositiveInfinity, 0, GRB.CONTINUOUS, "aux_" + this._auxiliaryVars.Count);
32 | this._auxiliaryVars.Add($"aux_{this._auxiliaryVars.Count}", var_1);
33 |
34 | var var_2 = this._model.AddVar(Double.NegativeInfinity, Double.PositiveInfinity, 0, GRB.CONTINUOUS, "aux_" + this._auxiliaryVars.Count);
35 | this._auxiliaryVars.Add($"aux_{this._auxiliaryVars.Count}", var_2);
36 |
37 | this._model.AddConstr(expr1, GRB.EQUAL, var_1, "eq_index_" + this._constraintEqCount++);
38 | this._model.AddConstr(expr2, GRB.EQUAL, var_2, "eq_index_" + this._constraintEqCount++);
39 |
40 | // add min constraint
41 | var auxiliaries = new GRBVar[] { var_1, var_2 };
42 | var MinResult = this._model.AddVar(Double.NegativeInfinity, Double.PositiveInfinity, 0, GRB.CONTINUOUS, "MinResult_" + this._auxiliaryVars.Count);
43 | this._auxiliaryVars.Add($"MinResult_{this._auxiliaryVars.Count}", MinResult);
44 | this._model.AddGenConstrMin(MinResult, auxiliaries, Double.PositiveInfinity, $"auxC_{this._auxiliaryVars.Count}");
45 | this._model.AddConstr(MinResult, GRB.EQUAL, 0, "MinConstr_" + this._constraintEqCount++);
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/MetaOptimize.Test/OptimalityGapTests.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using System;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
 11 |     /// Test that the optimality gap maximization is working.
12 | ///
13 | [TestClass]
14 | public class OptimalityGapTests
15 | {
16 | ///
17 | /// Function to create a new solver.
18 | ///
19 | internal Func> CreateSolver;
20 |
21 | ///
22 | /// Test that the optimality encoder works for a topology with one edge.
23 | ///
24 | [TestMethod]
25 | public void TestOptimialityGap()
26 | {
27 | var topology = new Topology();
28 | topology.AddNode("a");
29 | topology.AddNode("b");
30 | topology.AddNode("c");
31 | topology.AddNode("d");
32 | topology.AddEdge("a", "b", capacity: 10);
33 | topology.AddEdge("a", "c", capacity: 10);
34 | topology.AddEdge("b", "d", capacity: 10);
35 | topology.AddEdge("c", "d", capacity: 10);
36 |
37 | // create the optimal encoder.
38 | var solver = CreateSolver();
39 | var optimalEncoder = new TEMaxFlowOptimalEncoder(solver, maxNumPaths: 1);
40 |
41 | // create the pop encoder.
42 | var partition = topology.RandomPartition(2);
43 | var popEncoder = new PopEncoder(solver, maxNumPaths: 1, numPartitions: 2, demandPartitions: partition);
44 | var adversarialInputGenerator = new TEAdversarialInputGenerator(topology, maxNumPaths: 1);
45 |
46 | var (optimalSolution, popSolution) = adversarialInputGenerator.MaximizeOptimalityGap(optimalEncoder, popEncoder);
47 |
48 | // Debugging information.
49 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(optimalSolution, Newtonsoft.Json.Formatting.Indented));
50 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(popSolution, Newtonsoft.Json.Formatting.Indented));
51 |
52 | Assert.IsTrue(Utils.IsApproximately(40, ((TEMaxFlowOptimizationSolution)optimalSolution).MaxObjective));
53 | Assert.IsTrue(Utils.IsApproximately(20, ((TEMaxFlowOptimizationSolution)popSolution).MaxObjective));
54 | }
55 | }
56 | }
--------------------------------------------------------------------------------
/MetaOptimize/GurobiTerminationCallback.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Diagnostics;
9 | using Gurobi;
10 | class GurobiTerminationCallback : GRBCallback
11 | {
12 | private GRBModel model;
13 | private double prevObj;
14 | private Stopwatch timer;
15 | private double terminateNoImprovement_ms;
16 |
 17 |         // TODO: It does not seem like you're checking for improvement; am I missing something?
 18 |         // Mostly confused because the variable is called terminateNoImprovement_ms, but it seems like it should just be terminate?
19 | public GurobiTerminationCallback(GRBModel model, double terminateNoImprovement_ms)
20 | {
21 | this.model = model;
22 | this.prevObj = double.NaN;
23 | this.timer = null;
24 | this.terminateNoImprovement_ms = terminateNoImprovement_ms;
25 | }
26 |
27 | protected override void Callback()
28 | {
29 | try
30 | {
31 | if (where == GRB.Callback.MIPNODE)
32 | {
33 | var obj = GetDoubleInfo(GRB.Callback.MIPNODE_OBJBST);
34 | CallCallback(obj);
35 | }
36 | }
37 | catch (GRBException e)
38 | {
39 | Console.WriteLine("Error code: " + e.ErrorCode);
40 | Console.WriteLine(e.Message);
41 | Console.WriteLine(e.StackTrace);
42 | }
43 | catch (Exception e)
44 | {
45 | Console.WriteLine("Error during callback");
46 | Console.WriteLine(e.StackTrace);
47 | }
48 | throw new Exception("Should not enter this function.");
49 | }
50 |
51 | public void CallCallback(double obj)
52 | {
53 | if (this.timer == null || Double.IsNaN(prevObj))
54 | {
55 | prevObj = obj;
56 | this.timer = Stopwatch.StartNew();
57 | }
58 | if (Math.Abs(obj - prevObj) > 0.01)
59 | {
60 | prevObj = obj;
61 | this.timer = Stopwatch.StartNew();
62 | }
63 | if (this.timer.ElapsedMilliseconds > terminateNoImprovement_ms)
64 | {
65 | this.model.Terminate();
66 | }
67 | }
68 |
69 | public void ResetTermination()
70 | {
71 | this.prevObj = double.NaN;
72 | this.timer = null;
73 | }
74 | }
75 | }
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Security
4 |
5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet) and [Xamarin](https://github.com/xamarin).
6 |
7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/security.md/definition), please report it to us as described below.
8 |
9 | ## Reporting Security Issues
10 |
11 | **Please do not report security vulnerabilities through public GitHub issues.**
12 |
13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/security.md/msrc/create-report).
14 |
15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/security.md/msrc/pgp).
16 |
17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
18 |
19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
20 |
21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
22 | * Full paths of source file(s) related to the manifestation of the issue
23 | * The location of the affected source code (tag/branch/commit or direct URL)
24 | * Any special configuration required to reproduce the issue
25 | * Step-by-step instructions to reproduce the issue
26 | * Proof-of-concept or exploit code (if possible)
27 | * Impact of the issue, including how an attacker might exploit the issue
28 |
29 | This information will help us triage your report more quickly.
30 |
31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/security.md/msrc/bounty) page for more details about our active programs.
32 |
33 | ## Preferred Languages
34 |
35 | We prefer all communications to be in English.
36 |
37 | ## Policy
38 |
39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/security.md/cvd).
40 |
41 |
42 |
--------------------------------------------------------------------------------
/MetaOptimize/FailureAnalysis/README.md:
--------------------------------------------------------------------------------
1 | # Failure Analysis Module
2 |
3 | This module provides tools for analyzing and optimizing network topologies under failure scenarios. It includes various encoders and generators for capacity augmentation, failure analysis, and optimization.
4 |
5 | ## Core Components
6 |
7 | ### Solution Classes
8 | - `CapacityAugmentSolution`: Represents the solution for capacity augmentation problems, containing the status of links that need to be augmented.
9 | - `FailureAnalysisOptimizationSolution`: Represents the solution for failure analysis problems, including demands, link status, flow allocations, and optimization objectives.
10 |
11 | ### Capacity Augmentation
12 | - `CapacityAugmentEncoder`: Base class for encoding capacity augmentation problems. It finds the minimum number of links to add to carry target demand after failures.
13 | - `CapacityAugmentsOnExisting`: Specialized encoder that only increases capacity on existing links rather than adding new ones.
14 | - `CapacityAugmenterV2`: Enhanced version of the capacity augmentation encoder with additional features.
15 |
16 | ### Failure Analysis
17 | - `FailureAnalysisEncoder`: Base class for encoding failure analysis problems.
18 | - `FailureAnalysisEncoderWithUnequalPaths`: Extends the base encoder to handle unequal path scenarios.
19 | - `FailureAnalysisMLUCutEncoder`: Specialized encoder for Maximum Link Utilization (MLU) cut scenarios.
20 |
21 | ### Adversarial Generators
22 | - `FailureAnalysisAdversarialGenerator`: Main generator for creating adversarial scenarios in failure analysis.
23 | - `FailureAnalysisAdversarialGeneratorForUnequalPaths`: Specialized generator for handling unequal path scenarios.
24 | - `FailureAnalysisWithMetaNodeAdversarialGenerator`: Generator that supports meta-node scenarios in failure analysis.
25 |
26 | ## Key Features
27 |
28 | 1. **Capacity Augmentation**
29 | - Find minimum links to add for target demand
30 | - Increase capacity on existing links
31 | - Handle various network topologies
32 |
33 | 2. **Failure Analysis**
34 | - Analyze network behavior under failures
35 | - Comprehensive analysis of the impact across **all** possible failure scenarios.
 36 |    - Support for meta-nodes and unequal paths, plus targeted failure-scenario modeling (for example, only considering failures that do not disconnect the graph)
37 |
38 | 3. **TE Objectives**
39 | - Maximize total allocated flow
40 | - Maximum Link Utilization (MLU)
41 |
42 | ## Example usage:
43 |
 44 | - Please see MetaOptimize.Test for example test cases that use different parts of this code; a minimal illustrative sketch is shown below.
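
The sketch below mirrors the pattern used in the traffic-engineering tests: build a `Topology`, create a solver, construct an optimal encoder and a heuristic (here, failure-analysis) encoder, and hand both to an adversarial generator. The constructor arguments of `FailureAnalysisEncoder` and `FailureAnalysisAdversarialGenerator` are assumptions made for readability, and the `<TVar, TSolution>` type parameters are omitted; consult the tests for the real signatures.

```csharp
// Illustrative sketch only: the failure-analysis constructor arguments below are
// assumed, and generic type parameters (TVar, TSolution) are omitted for brevity.
var topology = new Topology();
topology.AddNode("a");
topology.AddNode("b");
topology.AddNode("c");
topology.AddNode("d");
topology.AddEdge("a", "b", capacity: 10);
topology.AddEdge("a", "c", capacity: 10);
topology.AddEdge("b", "d", capacity: 10);
topology.AddEdge("c", "d", capacity: 10);

// Any ISolver implementation can be plugged in; the Gurobi-backed solvers are the
// ones exercised in the tests.
var solver = new GurobiSOS();

// Compare the optimal TE encoding against a failure-analysis encoding and let the
// adversarial generator search for the inputs that maximize the gap between them.
var optimalEncoder = new TEMaxFlowOptimalEncoder(solver, maxNumPaths: 2);
var failureEncoder = new FailureAnalysisEncoder(solver, maxNumPaths: 2);
var generator = new FailureAnalysisAdversarialGenerator(topology, maxNumPaths: 2);
var (optimalSolution, failureSolution) = generator.MaximizeOptimalityGap(optimalEncoder, failureEncoder);

// The failure-analysis solution exposes the achieved objective value.
var gap = ((FailureAnalysisOptimizationSolution)failureSolution).MaxObjective;
```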
45 |
46 | ## Notes
47 | - All classes are generic and can work with different variable types (TVar) and solution types (TSolution)
48 | - The module supports both binary and continuous optimization problems
49 | - Various path computation methods are supported (KSP, etc.)
--------------------------------------------------------------------------------
/MetaOptimize.Test/DemandPinningTests.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize.Test
2 | {
3 | using System;
4 | using Gurobi;
5 | using Microsoft.VisualStudio.TestTools.UnitTesting;
6 |
7 | ///
8 | /// tests demand pinning.
9 | ///
10 | [TestClass]
11 | public class DemandPinningTests
12 | {
13 | ///
14 | /// Function to create a new solver.
15 | /// This uses a delegate method so that we can plug and play different solvers.
16 | ///
17 | internal Func> CreateSolver;
18 |
19 | ///
 20 |         /// Using a threshold of 5, tests the demand pinning solution
 21 |         /// on the diamond topology.
22 | ///
23 | [TestMethod]
24 | public void TestDiamondTopo()
25 | {
26 | var topology = new Topology();
27 | topology.AddNode("a");
28 | topology.AddNode("b");
29 | topology.AddNode("c");
30 | topology.AddNode("d");
31 | topology.AddEdge("a", "b", capacity: 10);
32 | topology.AddEdge("a", "c", capacity: 10);
33 | topology.AddEdge("b", "d", capacity: 10);
34 | topology.AddEdge("c", "d", capacity: 10);
35 |
36 | double threshold = 5;
37 | int k = 2;
38 |
39 | // create the optimal encoder.
40 | var solver = CreateSolver();
41 | var optimalEncoder = new TEMaxFlowOptimalEncoder(solver, maxNumPaths: k);
42 | var heuristicEncoder = new DemandPinningEncoder(solver, maxNumPaths: k, threshold: threshold);
43 | var adversarialInputGenerator = new TEAdversarialInputGenerator(topology, maxNumPaths: k);
44 | var (optimalSolution, demandPinningSolution) = adversarialInputGenerator.MaximizeOptimalityGap(optimalEncoder, heuristicEncoder);
45 | Console.WriteLine("Optimal:");
46 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(optimalSolution, Newtonsoft.Json.Formatting.Indented));
47 | Console.WriteLine("****");
48 | Console.WriteLine("Heuristic:");
49 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(demandPinningSolution, Newtonsoft.Json.Formatting.Indented));
50 | Console.WriteLine("****");
51 |
52 | var optimal = ((TEMaxFlowOptimizationSolution)optimalSolution).MaxObjective;
53 | var heuristic = ((TEMaxFlowOptimizationSolution)demandPinningSolution).MaxObjective;
54 | Console.WriteLine($"optimalG={optimal}, heuristicG={heuristic}");
55 | // Assert.IsTrue(TestHelper.IsApproximately(40, optimal));
56 | // Assert.IsTrue(TestHelper.IsApproximately(35, heuristic));
57 | Assert.IsTrue(Utils.IsApproximately(10, optimal - heuristic));
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/MetaOptimize/GurobiTimeoutCallback.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Diagnostics;
9 | using Gurobi;
10 | class GurobiTimeoutCallback : GRBCallback
11 | {
12 | private GRBModel model;
13 | private Stopwatch timer;
14 | private double timeout;
15 | private double presolvetime_ms = -1;
16 |
17 | public GurobiTimeoutCallback(GRBModel model, double timeout) {
18 | this.model = model;
19 | this.timer = null;
20 | this.timeout = timeout;
21 | if (this.timeout <= 0) {
22 | this.timeout = Double.PositiveInfinity;
23 | }
24 | }
25 |
26 | protected override void Callback()
27 | {
28 | try {
29 | if (where == GRB.Callback.PRESOLVE) {
30 | presolvetime_ms = GetDoubleInfo(GRB.Callback.RUNTIME) * 1000;
31 | } else {
32 | CallCallback(where, presolvetime_ms);
33 | }
34 | } catch (GRBException e) {
35 | Console.WriteLine("Error code: " + e.ErrorCode);
36 | Console.WriteLine(e.Message);
37 | Console.WriteLine(e.StackTrace);
38 | } catch (Exception e) {
39 | Console.WriteLine("Error during callback");
40 | Console.WriteLine(e.StackTrace);
41 | }
42 | throw new Exception("Should not enter this function.");
43 | }
44 | public void CallCallback(int where, double presolvetime_ms,
45 | bool storeLastIfTerminated = false, GurobiStoreProgressCallback storeProgressCallback = null)
46 | {
47 | if (where == GRB.Callback.PRESOLVE) {
48 | this.timer = null;
49 | return;
50 | }
51 | if (where != GRB.Callback.MIP && this.timer == null) {
52 | return;
53 | }
54 | if (this.timer == null) {
55 | Console.WriteLine("Starting the timeout timer");
56 | this.timer = Stopwatch.StartNew();
57 | }
58 | double currTime_ms = timer.ElapsedMilliseconds;
59 | if (currTime_ms > timeout) {
60 | // Utils.AppendToFile(@"../logs/logs.txt", "terminating after = " + currTime_ms);
61 | Console.WriteLine("Terminating After = " + currTime_ms + ", presolve time = " + presolvetime_ms);
62 | if (storeLastIfTerminated) {
63 | storeProgressCallback.WriteLastLineBeforeTermination(currTime_ms);
64 | }
65 | this.model.Terminate();
66 | }
67 | }
68 | public void ResetTermination()
69 | {
70 | this.presolvetime_ms = 0;
71 | this.timer = null;
72 | }
73 | }
74 | }
--------------------------------------------------------------------------------
/Topologies/partitioning/leader_election.py:
--------------------------------------------------------------------------------
1 | from .abstract_partitioning_method import AbstractPartitioningMethod
2 | import numpy as np
3 | import networkx as nx
4 | import time
5 |
6 |
7 | # Randomly partitions the graph, but ensures that each subgraph is contiguous
8 | class LeaderElection(AbstractPartitioningMethod):
9 | def __init__(self, num_partitions=None, seed=0):
10 | super().__init__(num_partitions=num_partitions, weighted=False)
11 | self.seed = seed
12 |
13 | @property
14 | def name(self):
15 | return "leader_election"
16 |
17 | def _partition_impl(self, G, topo_name=""):
18 | if not hasattr(self, "_num_partitions"):
19 | self._num_partitions = self._default_num_partitions(G)
20 |
21 | np.random.seed(self.seed)
22 | # First, select the "seed nodes" for our partitioning. Each seed node
23 | # represents a single partition. The remaining nodes will be assigned to
24 | # one of the seed nodes until every node is assigned
25 | start = time.time()
26 | seed_nodes = np.random.choice(G.nodes, self.num_partitions, replace=False)
27 | partition_vector = np.ones(len(G.nodes), dtype=np.int32) * -1
28 | partition_vector[seed_nodes] = np.arange(self.num_partitions)
29 |
30 | # while there are still unassigned nodes
31 | while np.sum(partition_vector == -1) != 0:
32 | # Select a node that has been unassigned
33 | new_node = np.random.choice(np.argwhere(partition_vector == -1).flatten())
34 |
35 | # From this node, collect all of the partitions that it neighbors
36 | # in the graph. If all of its neighbors have been unassigned, pick
37 | # a new node
38 | neighboring_partitions = np.unique(
39 | [
40 | partition_vector[x]
41 | for x in nx.all_neighbors(G, new_node)
42 | if partition_vector[x] != -1
43 | ]
44 | )
45 |
46 | already_tried = []
47 | while len(neighboring_partitions) == 0:
48 | already_tried.append(new_node)
49 | new_node = np.random.choice(
50 | np.setdiff1d(
51 | np.argwhere(partition_vector == -1).flatten(), already_tried
52 | )
53 | )
54 |
55 | neighboring_partitions = np.unique(
56 | [
57 | partition_vector[x]
58 | for x in nx.all_neighbors(G, new_node)
59 | if partition_vector[x] != -1
60 | ]
61 | )
62 |
63 | # Assign the selected node to one of the partitions it neighbors
64 | partition_assignment = np.random.choice(neighboring_partitions)
65 | partition_vector[new_node] = partition_assignment
66 | self.runtime = time.time() - start
67 |
68 | assert np.sum(partition_vector == -1) == 0
69 | return partition_vector
--------------------------------------------------------------------------------
/MetaOptimize.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 17
4 | VisualStudioVersion = 17.1.32414.318
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MetaOptimize", "MetaOptimize\MetaOptimize.csproj", "{EA37F76B-4826-4812-8654-BB8410D3AB57}"
7 | EndProject
8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MetaOptimize.Test", "MetaOptimize.Test\MetaOptimize.Test.csproj", "{79E0F923-7D9E-4869-9652-CA7AF63859E5}"
9 | EndProject
10 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MetaOptimize.Cli", "MetaOptimize.Cli\MetaOptimize.Cli.csproj", "{79D409E3-E0D6-44B7-BA48-07ADD9445155}"
11 | EndProject
12 | Global
13 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
14 | Debug|Any CPU = Debug|Any CPU
15 | Debug|x64 = Debug|x64
16 | Release|Any CPU = Release|Any CPU
17 | Release|x64 = Release|x64
18 | EndGlobalSection
19 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
20 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
21 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Debug|Any CPU.Build.0 = Debug|Any CPU
22 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Debug|x64.ActiveCfg = Debug|x64
23 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Debug|x64.Build.0 = Debug|x64
24 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Release|Any CPU.ActiveCfg = Release|Any CPU
25 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Release|Any CPU.Build.0 = Release|Any CPU
26 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Release|x64.ActiveCfg = Release|x64
27 | {EA37F76B-4826-4812-8654-BB8410D3AB57}.Release|x64.Build.0 = Release|x64
28 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
29 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Debug|Any CPU.Build.0 = Debug|Any CPU
30 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Debug|x64.ActiveCfg = Debug|x64
31 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Debug|x64.Build.0 = Debug|x64
32 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Release|Any CPU.ActiveCfg = Release|Any CPU
33 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Release|Any CPU.Build.0 = Release|Any CPU
34 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Release|x64.ActiveCfg = Release|x64
35 | {79E0F923-7D9E-4869-9652-CA7AF63859E5}.Release|x64.Build.0 = Release|x64
36 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
37 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Debug|Any CPU.Build.0 = Debug|Any CPU
38 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Debug|x64.ActiveCfg = Debug|x64
39 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Debug|x64.Build.0 = Debug|x64
40 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Release|Any CPU.ActiveCfg = Release|Any CPU
41 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Release|Any CPU.Build.0 = Release|Any CPU
42 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Release|x64.ActiveCfg = Release|x64
43 | {79D409E3-E0D6-44B7-BA48-07ADD9445155}.Release|x64.Build.0 = Release|x64
44 | EndGlobalSection
45 | GlobalSection(SolutionProperties) = preSolution
46 | HideSolutionNode = FALSE
47 | EndGlobalSection
48 | GlobalSection(ExtensibilityGlobals) = postSolution
49 | SolutionGuid = {A64F0750-8829-4EF7-8BD3-97BB73630F58}
50 | EndGlobalSection
51 | EndGlobal
52 |
--------------------------------------------------------------------------------
/Topologies/swan.json:
--------------------------------------------------------------------------------
1 | {
2 | "directed": true,
3 | "multigraph": false,
4 | "graph": {},
5 | "nodes": [
6 | {
7 | "id": 0
8 | },
9 | {
10 | "id": 1
11 | },
12 | {
13 | "id": 2
14 | },
15 | {
16 | "id": 3
17 | },
18 | {
19 | "id": 4
20 | },
21 | {
22 | "id": 5
23 | },
24 | {
25 | "id": 6
26 | },
27 | {
28 | "id": 7
29 | }
30 | ],
31 | "links": [
32 | {
33 | "capacity": 5000,
34 | "source": 0,
35 | "target": 1
36 | },
37 | {
38 | "capacity": 5000,
39 | "source": 0,
40 | "target": 3
41 | },
42 | {
43 | "capacity": 5000,
44 | "source": 1,
45 | "target": 0
46 | },
47 | {
48 | "capacity": 5000,
49 | "source": 1,
50 | "target": 2
51 | },
52 | {
53 | "capacity": 5000,
54 | "source": 2,
55 | "target": 1
56 | },
57 | {
58 | "capacity": 5000,
59 | "source": 2,
60 | "target": 3
61 | },
62 | {
63 | "capacity": 5000,
64 | "source": 2,
65 | "target": 4
66 | },
67 | {
68 | "capacity": 5000,
69 | "source": 2,
70 | "target": 5
71 | },
72 | {
73 | "capacity": 5000,
74 | "source": 3,
75 | "target": 0
76 | },
77 | {
78 | "capacity": 5000,
79 | "source": 3,
80 | "target": 2
81 | },
82 | {
83 | "capacity": 5000,
84 | "source": 3,
85 | "target": 4
86 | },
87 | {
88 | "capacity": 5000,
89 | "source": 3,
90 | "target": 5
91 | },
92 | {
93 | "capacity": 5000,
94 | "source": 4,
95 | "target": 2
96 | },
97 | {
98 | "capacity": 5000,
99 | "source": 4,
100 | "target": 3
101 | },
102 | {
103 | "capacity": 5000,
104 | "source": 4,
105 | "target": 5
106 | },
107 | {
108 | "capacity": 5000,
109 | "source": 4,
110 | "target": 6
111 | },
112 | {
113 | "capacity": 5000,
114 | "source": 5,
115 | "target": 3
116 | },
117 | {
118 | "capacity": 5000,
119 | "source": 5,
120 | "target": 2
121 | },
122 | {
123 | "capacity": 5000,
124 | "source": 5,
125 | "target": 4
126 | },
127 | {
128 | "capacity": 5000,
129 | "source": 5,
130 | "target": 7
131 | },
132 | {
133 | "capacity": 5000,
134 | "source": 6,
135 | "target": 4
136 | },
137 | {
138 | "capacity": 5000,
139 | "source": 6,
140 | "target": 7
141 | },
142 | {
143 | "capacity": 5000,
144 | "source": 7,
145 | "target": 5
146 | },
147 | {
148 | "capacity": 5000,
149 | "source": 7,
150 | "target": 6
151 | }
152 | ]
153 | }
154 |
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/AdversarialInputSimplifier.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Collections.Generic;
9 | using System.Diagnostics;
10 | using System.Linq;
11 | using Gurobi;
12 | using NLog;
13 |
14 | ///
15 | /// Meta-optimization utility functions for simplifying optimality gaps.
16 | ///
17 | public class TEAdversarialInputSimplifier<TVar, TSolution>
18 | {
19 | ///
20 | /// The topology for the network.
21 | ///
22 | protected Topology Topology { get; set; }
23 |
24 | ///
25 | /// The maximum number of paths to use between any two nodes.
26 | ///
27 | protected int maxNumPaths { get; set; }
28 |
29 | ///
30 | /// The demand variables.
31 | ///
32 | protected Dictionary<(string, string), Polynomial<TVar>> DemandVariables { get; set; }
33 |
34 | ///
35 | /// This class simplifies the TE problem and is written to be specific to TE.
36 | /// The idea is you find a gap and then try to find the demand that has the minimum number of non-zero elements that achieves the gap.
37 | ///
38 | public TEAdversarialInputSimplifier(Topology topology, int maxNumPath, Dictionary<(string, string), Polynomial<TVar>> DemandVariables)
39 | {
40 | this.Topology = topology;
41 | this.maxNumPaths = maxNumPath;
42 | this.DemandVariables = DemandVariables;
43 | }
44 |
45 | ///
46 | Find the minimum number of non-zero demands that achieves the desired gap,
47 | using the Gurobi direct optimization form.
48 | ///
49 | /// TODO - Engineering: Later add a version of this to a utility
50 | /// class so that other encoders can use it too.
51 | public Polynomial<TVar> AddDirectMinConstraintsAndObjectives(
52 | ISolver<TVar, TSolution> solver,
53 | Polynomial<TVar> gapObjective,
54 | double desiredGap)
55 | {
56 | // adding optimal - heuristic >= desiredGap
57 | var gapPoly = gapObjective.Negate();
58 | gapPoly.Add(new Term<TVar>(desiredGap));
59 | solver.AddLeqZeroConstraint(gapPoly);
60 |
61 | // adding f_i <= Mx_i where x_i is binary
62 | var minObj = new Polynomial<TVar>();
63 | foreach (var pair in this.Topology.GetNodePairs())
64 | {
65 | var auxDemandMinVar = solver.CreateVariable("aux_mindemand_" + pair.Item1 + "_" + pair.Item2, type: GRB.BINARY);
66 | var poly = this.DemandVariables[pair].Copy();
67 | poly.Add(new Term<TVar>(-1 * this.Topology.MaxCapacity() * this.maxNumPaths, auxDemandMinVar));
68 | solver.AddLeqZeroConstraint(poly);
69 | minObj.Add(new Term<TVar>(1, auxDemandMinVar));
70 | }
71 | return minObj.Negate();
72 | }
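// Usage sketch (hypothetical caller; the variable names are assumptions): once an
// adversarial gap has been found, re-solve while pinning the gap and minimizing the
// number of binary indicators, i.e. the number of non-zero demands. The returned
// polynomial is negated so that maximizing it minimizes that count.
//
//   var simplifier = new TEAdversarialInputSimplifier<TVar, TSolution>(topology, maxNumPaths, demandVariables);
//   var sparsityObjective = simplifier.AddDirectMinConstraintsAndObjectives(solver, gapObjective, desiredGap);
//   var solution = solver.Maximize(sparsityObjective);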
73 | }
74 | }
--------------------------------------------------------------------------------
/Topologies/abilene.json:
--------------------------------------------------------------------------------
1 | {
2 | "directed": true,
3 | "multigraph": false,
4 | "graph": {},
5 | "nodes": [
6 | {
7 | "id": 0
8 | },
9 | {
10 | "id": 1
11 | },
12 | {
13 | "id": 2
14 | },
15 | {
16 | "id": 3
17 | },
18 | {
19 | "id": 4
20 | },
21 | {
22 | "id": 5
23 | },
24 | {
25 | "id": 6
26 | },
27 | {
28 | "id": 7
29 | },
30 | {
31 | "id": 8
32 | },
33 | {
34 | "id": 9
35 | }
36 | ],
37 | "links": [
38 | {
39 | "capacity": 5000,
40 | "source": 0,
41 | "target": 1
42 | },
43 | {
44 | "capacity": 5000,
45 | "source": 0,
46 | "target": 2
47 | },
48 | {
49 | "capacity": 5000,
50 | "source": 1,
51 | "target": 0
52 | },
53 | {
54 | "capacity": 5000,
55 | "source": 1,
56 | "target": 2
57 | },
58 | {
59 | "capacity": 5000,
60 | "source": 1,
61 | "target": 5
62 | },
63 | {
64 | "capacity": 5000,
65 | "source": 2,
66 | "target": 1
67 | },
68 | {
69 | "capacity": 5000,
70 | "source": 2,
71 | "target": 0
72 | },
73 | {
74 | "capacity": 5000,
75 | "source": 2,
76 | "target": 3
77 | },
78 | {
79 | "capacity": 5000,
80 | "source": 3,
81 | "target": 2
82 | },
83 | {
84 | "capacity": 5000,
85 | "source": 3,
86 | "target": 4
87 | },
88 | {
89 | "capacity": 5000,
90 | "source": 3,
91 | "target": 6
92 | },
93 | {
94 | "capacity": 5000,
95 | "source": 4,
96 | "target": 3
97 | },
98 | {
99 | "capacity": 5000,
100 | "source": 4,
101 | "target": 5
102 | },
103 | {
104 | "capacity": 5000,
105 | "source": 4,
106 | "target": 7
107 | },
108 | {
109 | "capacity": 5000,
110 | "source": 5,
111 | "target": 1
112 | },
113 | {
114 | "capacity": 5000,
115 | "source": 5,
116 | "target": 4
117 | },
118 | {
119 | "capacity": 5000,
120 | "source": 6,
121 | "target": 3
122 | },
123 | {
124 | "capacity": 5000,
125 | "source": 6,
126 | "target": 7
127 | },
128 | {
129 | "capacity": 5000,
130 | "source": 6,
131 | "target": 8
132 | },
133 | {
134 | "capacity": 5000,
135 | "source": 7,
136 | "target": 4
137 | },
138 | {
139 | "capacity": 5000,
140 | "source": 7,
141 | "target": 6
142 | },
143 | {
144 | "capacity": 5000,
145 | "source": 7,
146 | "target": 9
147 | },
148 | {
149 | "capacity": 5000,
150 | "source": 8,
151 | "target": 6
152 | },
153 | {
154 | "capacity": 5000,
155 | "source": 8,
156 | "target": 9
157 | },
158 | {
159 | "capacity": 5000,
160 | "source": 9,
161 | "target": 8
162 | },
163 | {
164 | "capacity": 5000,
165 | "source": 9,
166 | "target": 7
167 | }
168 | ]
169 | }
--------------------------------------------------------------------------------
/MetaOptimize/PIFO/PIFOAdversarialInputGenerator.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using System.Diagnostics;
6 | using System.Linq;
7 | using System.Threading;
8 | using Gurobi;
9 |
10 | ///
11 | /// Meta-optimization utility functions for maximizing optimality gaps.
12 | ///
13 | public class PIFOAdversarialInputGenerator<TVar, TSolution>
14 | {
15 | ///
16 | /// number of packets.
17 | ///
18 | protected int NumPackets { get; set; }
19 |
20 | ///
21 | /// maximum rank for a packet.
22 | ///
23 | protected int MaxRank { get; set; }
24 |
25 | ///
26 | /// variables tracking rank of each packet.
27 | ///
28 | protected Dictionary<int, TVar> packetRankVars { get; set; }
29 |
30 | ///
31 | /// Constructor.
32 | ///
33 | public PIFOAdversarialInputGenerator(int numPackets, int maxRank)
34 | {
35 | this.MaxRank = maxRank;
36 | this.NumPackets = numPackets;
37 | }
38 |
39 | private Dictionary<int, TVar> CreateRankVariables(ISolver<TVar, TSolution> solver)
40 | {
41 | var output = new Dictionary<int, TVar>();
42 | for (int pid = 0; pid < this.NumPackets; pid++) {
43 | output[pid] = solver.CreateVariable("rank_" + pid, GRB.INTEGER, lb: 0, ub: this.MaxRank);
44 | }
45 | return output;
46 | }
47 |
48 | ///
49 | /// Find an adversarial input that maximizes the optimality gap between two optimizations.
50 | ///
51 | public (PIFOOptimizationSolution, PIFOOptimizationSolution) MaximizeOptimalityGap(
52 | IEncoder<TVar, TSolution> optimalEncoder,
53 | IEncoder<TVar, TSolution> heuristicEncoder,
54 | bool cleanUpSolver = true,
55 | bool verbose = false)
56 | {
57 | if (optimalEncoder.Solver != heuristicEncoder.Solver)
58 | {
59 | throw new Exception("Solver mismatch between optimal and heuristic encoders.");
60 | }
61 |
62 | var solver = optimalEncoder.Solver;
63 | if (cleanUpSolver) {
64 | solver.CleanAll();
65 | }
66 |
67 | Utils.logger("creating rank variables.", verbose);
68 | this.packetRankVars = CreateRankVariables(solver);
69 |
70 | Utils.logger("generating optimal encoding.", verbose);
71 | var optimalEncoding = optimalEncoder.Encoding(preRankVariables: this.packetRankVars,
72 | verbose: verbose);
73 | Utils.logger("generating heuristic encoding.", verbose);
74 | var heuristicEncoding = heuristicEncoder.Encoding(preRankVariables: this.packetRankVars,
75 | verbose: verbose);
76 |
77 | Utils.logger("setting the objective.", verbose);
78 | var objective = new Polynomial<TVar>(
79 | new Term<TVar>(-1, optimalEncoding.GlobalObjective),
80 | new Term<TVar>(1, heuristicEncoding.GlobalObjective));
81 | var solution = solver.Maximize(objective, reset: true);
82 |
83 | return ((PIFOOptimizationSolution)optimalEncoder.GetSolution(solution),
84 | (PIFOOptimizationSolution)heuristicEncoder.GetSolution(solution));
85 | }
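// Usage sketch (hedged; the encoder constructor arguments are assumptions, the class
// names come from this repository): both encoders must share one solver instance so the
// common rank variables couple the two encodings.
//
//   var solver = new GurobiSOS();
//   var optimal = new PIFOAvgDelayOptimalEncoder(solver, numPackets, maxRank);
//   var heuristic = new SPPIFOAvgDelayEncoder(solver, numPackets, maxRank);
//   var generator = new PIFOAdversarialInputGenerator<GRBVar, GRBModel>(numPackets, maxRank);
//   var (optimalSol, heuristicSol) = generator.MaximizeOptimalityGap(optimal, heuristic);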
86 | }
87 | }
--------------------------------------------------------------------------------
/Topologies/verify_demand.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import networkx as nx
3 | from collections import defaultdict
4 | import os
5 | import parse_and_convert_graphml
6 | import itertools
7 | import json
8 |
9 |
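# Sanity-checks demand files produced by MetaOptimize runs: for each demand file, report
# how many node pairs carry non-zero demand, the resulting demand density, and a histogram
# (cdf_dict) of shortest-path lengths for the "large" demands (rate > 0.25).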
10 | fname = 'Cogentco.json'
11 | G = parse_and_convert_graphml.read_graph_json(fname)
12 | # print(G)
13 | print(nx.diameter(G))
14 | # demandFile = f"../logs/realistic_constraints/Cogentco_10_DemandPinning_0.5_0.05_4_2023_1_13_11_27_15_374/" + \
15 | # f"primal_dual_DemandPinning_density_1_maxLargeDistance_1_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt"
16 |
17 | log_dir = "../logs/realistic_constraints/"
18 | # demandFile = f"../logs/realistic_constraints/Cogentco_10_DemandPinning_0.5_0.05_4_2023_1_13_11_27_15_374/" + \
19 | # "primal_dual_DemandPinning_density_1_maxLargeDistance_-1_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt"
20 |
21 | # demand_file_list = [
22 | # "Cogentco_10_DemandPinning_0.5_0.05_4_2023_1_31_20_4_7_686/primal_dual_DemandPinning_density_1_maxLargeDistance_2_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
23 | # "Cogentco_10_DemandPinning_0.5_0.05_4_2023_2_1_12_21_16_516/primal_dual_DemandPinning_density_1_maxLargeDistance_-1_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
24 | # "Cogentco_10_DemandPinning_0.5_0.05_4_2023_2_1_12_21_16_516/primal_dual_DemandPinning_density_1_maxLargeDistance_4_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
25 | # ]
26 |
27 | demand_file_list = [
28 | # "Cogentco_10_DemandPinning_0.5_0.05_4_2023_2_7_5_29_0_748/primal_dual_DemandPinning_density_1_maxLargeDistance_-1_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
29 | # "Cogentco_10_DemandPinning_0.5_0.05_4_2023_2_7_5_29_0_748/primal_dual_DemandPinning_density_1_maxLargeDistance_4_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
30 | # "Cogentco_10_DemandPinning_0.5_0.05_4_2023_2_8_1_25_49_822/primal_dual_DemandPinning_density_1_maxLargeDistance_5_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
31 | # "Cogentco_10_ExpectedPop_0.5_0.05_4_2023_2_8_10_59_35_421/primal_dual_ExpectedPop_density_1_maxLargeDistance_-1_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
32 | # "Cogentco_10_ExpectedPop_0.5_0.05_4_2023_2_8_10_59_35_421/primal_dual_ExpectedPop_density_1_maxLargeDistance_4_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
33 | "Cogentco_10_ExpectedPop_0.5_0.05_4_2023_2_10_23_19_7_953/primal_dual_ExpectedPop_density_1_maxLargeDistance_-1_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt",
34 | "Cogentco_10_ExpectedPop_0.5_0.05_4_2023_2_11_16_37_13_225/primal_dual_ExpectedPop_density_1_maxLargeDistance_4_maxSmallDistance-1_LargeDemandLB_0.25/demands.txt"
35 | ]
36 |
37 | for demand_file in demand_file_list:
38 | print(f"=============== {demand_file} ===============")
39 | with open(log_dir + demand_file, "r") as fp:
40 | demands = dict(json.load(fp))
41 | print(len(demands))
42 | num_nodes = len(G.nodes())
43 | num_pairs = num_nodes * (num_nodes - 1)
44 | print("num nodes: ", num_nodes)
45 | print("num pairs: ", num_pairs)
46 | num_positive_demands = 0
47 |
48 | cdf_dict = defaultdict(int)
49 | for pair, rate in demands.items():
50 | src, dst = pair.split(", ")
51 | src = int(src[1:])
52 | dst = int(dst[:-1])
53 | if rate > 0.00001:
54 | num_positive_demands += 1
55 | if rate > 0.25:
56 | path_len = nx.shortest_path_length(G, src, dst)
57 | # assert path_len == 1
58 | cdf_dict[path_len] += 1
59 |
60 | print("num non zeros:", num_positive_demands)
61 | print("density:", num_positive_demands / num_pairs)
62 | print("cdf large flows:", cdf_dict)
63 |
--------------------------------------------------------------------------------
/MetaOptimize/McCormickRelaxation.cs:
--------------------------------------------------------------------------------
1 | namespace MetaOptimize
2 | {
3 | // TODO: explain this a lot better: people with heuristics won't know what a bilinear term is or what a McCormick relaxation is.
4 | ///
5 | /// Uses McCormick envelopes to relax bilinear terms.
6 | ///
7 | /// TODO: something here doesn't make sense: the main overload of Bilinear takes polynomials for both x and y, but none of the other overloads pass a polynomial for x.
8 | /// Double check to make sure the implementation is correct.
9 | public static class McCormickRelaxation
10 | {
11 | ///
12 | /// McCormick relaxation.
13 | /// Assume x \in [xL, xU].
14 | /// Assume y \in [yL, yU].
15 | /// We replace x * y with z and add:
16 | /// 1. z \leq xU y + x yL - xU yL.
17 | /// 2. z \leq xL y + x yU - xL yU.
18 | /// 3. z \geq xL y + x yL - xL yL.
19 | /// 4. z \geq xU y + x yU - xU yU.
20 | ///
21 | /// TODO: how come this is not referenced anywhere?
22 | public static void Bilinear<TVar, TSolution>(ISolver<TVar, TSolution> solver, TVar x, TVar y, TVar output,
23 | double xLB, double xUB, double yLB, double yUB)
24 | {
25 | Bilinear(solver, x, new Polynomial<TVar>(new Term<TVar>(1, y)), output, xLB, xUB, yLB, yUB);
26 | }
27 |
28 | ///
29 | /// McCormick relaxation that takes a polynomial as input for y.
30 | ///
31 | /// TODO: explain how the polynomial is mapped to a bilinear term; is it that we have x * y?
32 | public static void Bilinear<TVar, TSolution>(ISolver<TVar, TSolution> solver, TVar x, Polynomial<TVar> y, TVar output,
33 | double xLB, double xUB, double yLB, double yUB)
34 | {
35 | Bilinear(solver, x, y, new Polynomial<TVar>(new Term<TVar>(1, output)), xLB, xUB, yLB, yUB);
36 | }
37 |
38 | ///
39 | /// McCormick relaxation that takes polynomials as input for y and the output.
40 | ///
41 | /// TODO: duplicate description compared to the one above it.
42 | public static void Bilinear<TVar, TSolution>(ISolver<TVar, TSolution> solver, TVar x, Polynomial<TVar> y, Polynomial<TVar> output,
43 | double xLB, double xUB, double yLB, double yUB)
44 | {
45 | Bilinear(solver, new Polynomial<TVar>(new Term<TVar>(1, x)), y, output, xLB, xUB, yLB, yUB);
46 | }
47 |
48 | ///
49 | /// McCormick relaxation where x, y, and the output are all polynomials.
50 | ///
51 | public static void Bilinear<TVar, TSolution>(ISolver<TVar, TSolution> solver, Polynomial<TVar> x, Polynomial<TVar> y, Polynomial<TVar> output,
52 | double xLB, double xUB, double yLB, double yUB)
53 | {
54 | var constr1 = new Polynomial<TVar>(new Term<TVar>(xUB * yLB));
55 | constr1.Add(output.Copy());
56 | constr1.Add(x.Multiply(-yLB));
57 | constr1.Add(y.Multiply(-xUB));
58 | solver.AddLeqZeroConstraint(constr1);
59 |
60 | var constr2 = new Polynomial<TVar>(new Term<TVar>(xLB * yUB));
61 | constr2.Add(output.Copy());
62 | constr2.Add(x.Multiply(-yUB));
63 | constr2.Add(y.Multiply(-xLB));
64 | solver.AddLeqZeroConstraint(constr2);
65 |
66 | var constr3 = new Polynomial<TVar>(new Term<TVar>(-xLB * yLB));
67 | constr3.Add(output.Negate());
68 | constr3.Add(x.Multiply(yLB));
69 | constr3.Add(y.Multiply(xLB));
70 | solver.AddLeqZeroConstraint(constr3);
71 |
72 | var constr4 = new Polynomial<TVar>(new Term<TVar>(-xUB * yUB));
73 | constr4.Add(output.Negate());
74 | constr4.Add(x.Multiply(yUB));
75 | constr4.Add(y.Multiply(xUB));
76 | solver.AddLeqZeroConstraint(constr4);
77 | }
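// Worked example (illustrative only): for x in [0, 10] and y in [0, 1], the four
// inequalities above reduce to
//   z <= 10*y,   z <= x,   z >= 0,   z >= x + 10*y - 10,
// so the relaxation is exact whenever y is 0 or 1 (z = 0 and z = x respectively),
// which is the typical use of a McCormick envelope with an indicator-like variable.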
78 | }
79 | }
--------------------------------------------------------------------------------
/MetaOptimize.Cli/Program.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Cli
6 | {
7 | using CommandLine;
8 |
9 | ///
10 | /// Entry point for MetaOptimize CLI.
11 | /// Routes to different problem solvers based on command-line arguments.
12 | ///
13 | ///
14 | /// Supports four problem types:
15 | /// - TrafficEngineering: Find worst-case demand patterns for routing heuristics
16 | /// - BinPacking: Find adversarial item sizes that maximize FFD vs optimal gap
17 | /// - PIFO: Find packet sequences that maximize scheduling inversions
18 | /// - FailureAnalysis: Analyze network resilience under link failures
19 | ///
20 | /// Uses CommandLineParser for argument parsing with CliOptions.
21 | ///
22 | public class Program
23 | {
24 | ///
25 | /// Main entry point for the program.
26 | /// Parses command-line arguments and dispatches to the appropriate runner.
27 | ///
28 | /// Command-line arguments.
29 | public static void Main(string[] args)
30 | {
31 | var parseResult = CommandLine.Parser.Default.ParseArguments<CliOptions>(args);
32 |
33 | parseResult.WithParsed(opts =>
34 | {
35 | CliOptions.Instance = opts;
36 | RunWithOptions(opts);
37 | });
38 |
39 | parseResult.WithNotParsed(errors =>
40 | {
41 | // CommandLineParser prints help/errors automatically
42 | Environment.Exit(1);
43 | });
44 | }
45 |
46 | ///
47 | /// Executes the appropriate runner based on the problem type.
48 | ///
49 | /// Parsed command-line options.
50 | private static void RunWithOptions(CliOptions opts)
51 | {
52 | try
53 | {
54 | // Debug output
55 | if (opts.Verbose)
56 | {
57 | Console.WriteLine($"[DEBUG] UseDefaultTopology: {opts.UseDefaultTopology}");
58 | Console.WriteLine($"[DEBUG] BreakSymmetry: {opts.BreakSymmetry}");
59 | Console.WriteLine($"[DEBUG] EnableClustering: {opts.EnableClustering}");
60 | Console.WriteLine($"[DEBUG] Verbose: {opts.Verbose}");
61 | Console.WriteLine($"[DEBUG] Debug: {opts.Debug}");
62 | }
63 |
64 | switch (opts.ProblemType)
65 | {
66 | case ProblemType.TrafficEngineering:
67 | TERunner.Run(opts);
68 | break;
69 |
70 | case ProblemType.BinPacking:
71 | BPRunner.Run(opts);
72 | break;
73 |
74 | case ProblemType.PIFO:
75 | PIFORunner.Run(opts);
76 | break;
77 |
78 | case ProblemType.FailureAnalysis:
79 | FailureAnalysisRunner.Run(opts);
80 | break;
81 |
82 | default:
83 | Console.WriteLine($"ERROR: Unknown problem type '{opts.ProblemType}'");
84 | Environment.Exit(1);
85 | break;
86 | }
87 |
88 | Console.WriteLine(new string('=', 60));
89 | Console.WriteLine("Execution completed successfully.");
90 | }
91 | catch (Exception ex)
92 | {
93 | Console.WriteLine($"\nERROR: {ex.Message}");
94 | Console.WriteLine($"Stack Trace:\n{ex.StackTrace}");
95 | Environment.Exit(1);
96 | }
97 | }
98 | }
99 | }
--------------------------------------------------------------------------------
/Topologies/partitioning/spectral_clustering.py:
--------------------------------------------------------------------------------
1 | from .abstract_partitioning_method import AbstractPartitioningMethod
2 | from sklearn.cluster import KMeans
3 | from .utils import all_partitions_contiguous
4 | import numpy as np
5 | import networkx as nx
6 | import time
7 |
8 |
9 | # Run NJW spectral clustering, use eigengap heuristic to select the number of partitions
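# Algorithm sketch: build the normalized Laplacian L = I - D^(-1/2) W D^(-1/2), take the
# eigenvectors of the k smallest eigenvalues, row-normalize them, and run k-means on the
# rows; when k is not given, the eigengap heuristic below picks it.
# Hypothetical usage:
#   partition_vector = SpectralClustering(num_partitions=4).partition(G, "Cogentco")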
10 | class SpectralClustering(AbstractPartitioningMethod):
11 |
12 | def __init__(self, num_partitions=None, weighted=True, seed=0):
13 | super().__init__(num_partitions=num_partitions, weighted=weighted)
14 | if weighted:
15 | self._adj_mat = lambda G: np.asarray(
16 | nx.adjacency_matrix(G, weight='capacity').todense(), dtype=np.float64)
17 | else:
18 | self._adj_mat = lambda G: np.asarray(
19 | nx.adjacency_matrix(G, weight='').todense(), dtype=np.float64)
20 | self.seed = seed
21 |
22 | @property
23 | def name(self):
24 | return 'spectral_clustering'
25 |
26 | def run_k_means_on_eigenvectors(self, eigvecs, num_nodes):
27 | start = time.time()
28 | V = eigvecs[:, :self._num_partitions]
29 | U = V / np.linalg.norm(V, axis=1).reshape(num_nodes, 1)
30 |
31 | k_means = KMeans(self._num_partitions, n_init=100, random_state=self.seed).fit(U)
32 | self.runtime = time.time() - start
33 | return k_means.labels_
34 |
35 | # Normalized spectral clustering according to Ng, Jordan, and Weiss (2002)
36 | def _partition_impl(self, G, topo_name=""):
37 |
38 | def is_symmetric(a, rtol=1e-05, atol=1e-08):
39 | return np.allclose(a, a.T, rtol=rtol, atol=atol)
40 |
41 | def is_pos_semi_def(x):
42 | return np.all(np.linalg.eigvals(x) >= -1e-5)
43 |
44 | G = G.copy()
45 | num_nodes = len(G.nodes)
46 | W = self._adj_mat(G.to_undirected())
47 |
48 | # 1) Build the normalized Laplacian matrix L of the graph:
49 | # L = I - D^(-1/2) W D^(-1/2), where D^(-1/2) is a diagonal matrix with (D^(-1/2))_ii = (D_ii)^(-1/2)
50 | D = np.diag(np.sum(W, axis=1))
51 | D_norm = np.power(D, -0.5)
52 | D_norm[D_norm == np.inf] = 0.0
53 | L = np.identity(W.shape[0]) - D_norm.dot(W).dot(D_norm)
54 | assert is_symmetric(L)
55 | assert is_pos_semi_def(L)
56 |
57 | # 2) Find the eigenvalues and eigenvectors of L
58 | eigvals, eigvecs = np.linalg.eig(L)
59 | eigvals, eigvecs = eigvals.astype(np.float32), eigvecs.astype(np.float32)
60 | eigvecs = eigvecs[:, np.argsort(eigvals)]
61 | eigvals = eigvals[np.argsort(eigvals)]
62 | self.eigenvals = eigvals
63 |
64 | # 3) If number of partitions was not set, find largest eigengap between eigenvalues. If resulting
65 | # partition is not contiguous, try the 2nd-largest eigengap, and so on...
66 | if not hasattr(self, '_num_partitions'):
67 | max_num_parts = int(num_nodes / 4)
68 | print('Using eigengap heuristic to select number of partitions, max: {}'.format(max_num_parts))
69 | self.eigengaps = np.array([eigvals[i+1] - eigvals[i] for i in range(len(eigvals[:max_num_parts]) - 1)])
70 |
71 | k = 0
72 | indices = self.eigengaps.argsort()[::-1]
73 |
74 | while k < len(indices):
75 | self._num_partitions = indices[k]
76 | print('Trying {} partitions'.format(self._num_partitions))
77 | p_v = self.run_k_means_on_eigenvectors(eigvecs, num_nodes)
78 | if all_partitions_contiguous(G, p_v):
79 | break
80 | k += 1
81 | if k == len(indices):
82 | raise Exception('could not find valid partitioning')
83 |
84 | print('Eigengap heuristic selected {} partitions'.format(self._num_partitions))
85 | return p_v
86 |
87 | else:
88 | return self.run_k_means_on_eigenvectors(eigvecs, num_nodes)
89 |
--------------------------------------------------------------------------------
/MetaOptimize/GurobiStoreProgressCallback.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using Gurobi;
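///
/// Records the incumbent objective over time into a progress file (appending
/// "elapsed time, best objective so far" lines), so a run's progress can be plotted
/// afterwards. Times are offset by the presolve time and by a bias that accumulates
/// across ResetProgressTimer() calls.
///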
9 | class GurobiStoreProgressCallback : GRBCallback
10 | {
11 | private GRBModel model;
12 | private String dirname;
13 | private String filename;
14 | private double timeBias = 0.0;
15 | private double presolvetime_ms = -1;
16 | private double bstObj = Double.NegativeInfinity;
17 | private double lastTime = -1;
18 |
19 | public GurobiStoreProgressCallback(GRBModel model, String dirname, String filename)
20 | {
21 | this.model = model;
22 | this.dirname = dirname;
23 | this.filename = filename;
24 | Utils.CreateFile(dirname, filename, removeIfExist: false);
25 | Console.WriteLine("will store the progress in dir: " + this.dirname + " on file " + this.filename);
26 | // this.presolvetimer = null;
27 | Utils.AppendToFile(this.dirname, this.filename, 0 + ", " + 0);
28 | }
29 |
30 | protected override void Callback()
31 | {
32 | try
33 | {
34 | if (where == GRB.Callback.PRESOLVE)
35 | {
36 | this.presolvetime_ms = GetDoubleInfo(GRB.Callback.RUNTIME) * 1000;
37 | }
38 | else if (where == GRB.Callback.MIP)
39 | {
40 | var obj = GetDoubleInfo(GRB.Callback.MIP_OBJBST);
41 | var currtime_ms = GetDoubleInfo(GRB.Callback.RUNTIME) * 1000;
42 | CallCallback(obj, currtime_ms, this.presolvetime_ms);
43 | }
44 | }
45 | catch (GRBException e)
46 | {
47 | Console.WriteLine("Error code: " + e.ErrorCode);
48 | Console.WriteLine(e.Message);
49 | Console.WriteLine(e.StackTrace);
50 | }
51 | catch (Exception e)
52 | {
53 | Console.WriteLine("Error during callback");
54 | Console.WriteLine(e.StackTrace);
55 | }
56 | throw new Exception("Should not enter this function.");
57 | }
58 |
59 | public void CallCallback(double objective, double currtime_ms, double presolvetime_ms)
60 | {
61 | this.bstObj = Math.Max(this.bstObj, objective);
62 | double time = timeBias + currtime_ms - presolvetime_ms;
63 | Utils.AppendToFile(dirname, filename, time + ", " + this.bstObj);
64 | this.lastTime = time;
65 | }
66 |
67 | public void WriteLastLineBeforeTermination(double finaltime_ms)
68 | {
69 | // Utils.AppendToFile(@"../logs/logs.txt", " last time = " + lastTime + " final time = " + finaltime_ms);
70 | finaltime_ms += timeBias;
71 | if (finaltime_ms > lastTime)
72 | {
73 | Utils.AppendToFile(dirname, filename, finaltime_ms + ", " + this.bstObj);
74 | this.lastTime = finaltime_ms;
75 | }
76 | }
77 |
78 | public void AppendToStoreProgressFile(double time_ms, double gap)
79 | {
80 | if (time_ms > lastTime)
81 | {
82 | this.bstObj = Math.Max(this.bstObj, gap);
83 | Utils.AppendToFile(dirname, filename, time_ms + ", " + this.bstObj);
84 | this.lastTime = time_ms;
85 | }
86 | }
87 |
88 | public void ResetProgressTimer()
89 | {
90 | this.presolvetime_ms = 0;
91 | this.timeBias = Double.Parse(Utils.readLastLineFile(this.dirname, this.filename).Split(", ")[0]);
92 | Utils.AppendToFile(dirname, filename, "========= Reset Called ========");
93 | // Utils.AppendToFile(@"../logs/logs.txt", "time bias = " + timeBias);
94 | }
95 | }
96 | }
--------------------------------------------------------------------------------
/Topologies/partitioning/leader_election_uniform.py:
--------------------------------------------------------------------------------
1 | from .abstract_partitioning_method import AbstractPartitioningMethod
2 | import numpy as np
3 | import networkx as nx
4 | import time
5 |
6 |
7 | # Randomly partitions the graph, but ensures that each subgraph is contiguous
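# Like LeaderElection, but additionally keeps partition sizes roughly uniform: a node may
# only join a neighboring partition whose current size is within the tolerance factor of
# the uniform share, and the whole assignment restarts from fresh seed nodes if no such
# partition can be found.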
8 | class LeaderElectionUniform(AbstractPartitioningMethod):
9 | def __init__(self, num_partitions=None, seed=0, tolerance=0.2):
10 | super().__init__(num_partitions=num_partitions, weighted=False)
11 | self.seed = seed
12 | self.tolerance = 1 + tolerance
13 |
14 | @property
15 | def name(self):
16 | return "leader_election_uniform"
17 |
18 | def _partition_impl(self, G, topo_name=""):
19 | if not hasattr(self, "_num_partitions"):
20 | self._num_partitions = self._default_num_partitions(G)
21 |
22 | np.random.seed(self.seed)
23 | # First, select the "seed nodes" for our partitioning. Each seed node
24 | # represents a single partition. The remaining nodes will be assigned to
25 | # one of the seed nodes until every node is assigned
26 | start = time.time()
27 | seed_nodes = np.random.choice(G.nodes, self.num_partitions, replace=False)
28 | partition_vector = np.ones(len(G.nodes), dtype=np.int32) * -1
29 | partition_vector[seed_nodes] = np.arange(self.num_partitions)
30 | uniform_partition_num = len(G.nodes) // self.num_partitions
31 |
32 | # while there are still unassigned nodes
33 | partition_try_num = 0
34 | while np.sum(partition_vector == -1) != 0:
35 | # Select a node that is still unassigned
36 | new_node = np.random.choice(np.argwhere(partition_vector == -1).flatten())
37 |
38 | # From this node, collect all of the partitions that it neighbors
39 | # in the graph. If none of its neighbors has been assigned yet, pick
40 | # a new node
41 | neighboring_partitions = np.unique(
42 | [
43 | partition_vector[x]
44 | for x in nx.all_neighbors(G, new_node)
45 | if partition_vector[x] != -1 and np.sum(partition_vector == partition_vector[x]) <= self.tolerance * (uniform_partition_num + 1)
46 | ]
47 | )
48 |
49 | already_tried = []
50 | while len(neighboring_partitions) == 0 and len(already_tried) < np.sum(partition_vector == -1) - 1:
51 | already_tried.append(new_node)
52 | new_node = np.random.choice(
53 | np.setdiff1d(
54 | np.argwhere(partition_vector == -1).flatten(), already_tried
55 | )
56 | )
57 |
58 | neighboring_partitions = np.unique(
59 | [
60 | partition_vector[x]
61 | for x in nx.all_neighbors(G, new_node)
62 | if partition_vector[x] != -1 and
63 | np.sum(partition_vector == partition_vector[x]) <= self.tolerance * (uniform_partition_num + 1)
64 | ]
65 | )
66 |
67 | if len(neighboring_partitions) == 0:
68 | partition_try_num += 1
69 | print(f" try {partition_try_num} invalid partitioning...")
70 | seed_nodes = np.random.choice(G.nodes, self.num_partitions, replace=False)
71 | partition_vector = np.ones(len(G.nodes), dtype=np.int32) * -1
72 | partition_vector[seed_nodes] = np.arange(self.num_partitions)
73 | uniform_partition_num = len(G.nodes) // self.num_partitions
74 | continue
75 | # Assign the selected node to one of the partitions it neighbors
76 | partition_assignment = np.random.choice(neighboring_partitions)
77 | partition_vector[new_node] = partition_assignment
78 | self.runtime = time.time() - start
79 |
80 | assert np.sum(partition_vector == -1) == 0
81 | return partition_vector
--------------------------------------------------------------------------------
/Topologies/partitioning/fm_partitioning.py:
--------------------------------------------------------------------------------
1 | from .abstract_partitioning_method import AbstractPartitioningMethod
2 | import hashlib
3 | import numpy as np
4 | import os
5 | import re
6 | import subprocess
7 |
8 |
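# Wraps the external FastCommunity (greedy modularity maximization) binary: the graph is
# written out as a weighted pair list, the executable is invoked via the shell, and the
# resulting "-fc_a.groups" file is read back into a partition vector. When num_partitions
# is not given, the merge step with the best modularity score determines it.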
9 | class FMPartitioning(AbstractPartitioningMethod):
10 | TL_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
11 | # assumes this file to be in $ROOT/py/partitioning; then, the rundir is at $ROOT/fm_rundir
12 | run_folder = os.path.join(TL_DIR, "ext", "modularity", "rundir")
13 | if not os.path.exists(run_folder):
14 | os.makedirs(run_folder)
15 | exe_folder = os.path.join(TL_DIR, "ext", "modularity", "FastCommunity_w_GPL_v1.0.1")
16 | fm_exe = os.path.join(exe_folder, "FastCommunity_wMH")
17 | if not os.path.exists(fm_exe):
18 | curr_dir = os.getcwd()
19 | os.chdir(exe_folder)
20 | subprocess.call(["make"])
21 | os.chdir(curr_dir)
22 |
23 | opt_num_partitions = {}
24 |
25 | def __init__(self, num_partitions=None):
26 | super().__init__(num_partitions=num_partitions, weighted=False)
27 |
28 | @property
29 | def name(self):
30 | return "fm_partitioning"
31 |
32 | def _partition_impl(self, G, topo_name = "", all_tm_files=[]):
33 | topo = topo_name
34 | # write weighted pairs to file
35 | rnd_number = topo_name + str(np.random.random() * 1000)
36 | wpfile = os.path.join(self.run_folder, rnd_number + ".wpairs")
37 | with open(wpfile, "w") as outF:
38 | seen = set()
39 | for (u, v, c) in G.edges.data("capacity"):
40 | seen.add((u, v))
41 | wt = c
42 | outF.write("%d\t%d\t%d\n" % (u, v, wt))
43 |
44 | if not hasattr(self, "_num_partitions"):
45 | # Run without num partitions argument to determine optimal number
46 | # of partitions based on modularity
47 | cmd = (
48 | self.fm_exe
49 | + " -f "
50 | + wpfile
51 | + ' | grep ^Q | sort -g -r -k3 | head -n 1 | sed -e "s/\[/ /g" | sed -e "s/\]/ /g" | awk \'{print $2}\''
52 | )
53 | print("cmd=[{}]".format(cmd))
54 |
55 | self._num_partitions = len(G.nodes) - int(os.popen(cmd).read())
56 | print("opt #partitions= ", self._num_partitions)
57 |
58 | # Run with num partitions argument, save to temporary output file
59 | fm_param = len(G.nodes) - self._num_partitions
60 | cmd = self.fm_exe + " -f " + wpfile + " -c " + str(fm_param)
61 | print("cmd=[{}]".format(cmd))
62 |
63 | temp_fname = hashlib.md5(np.int64(np.random.randint(2 ** 31 - 1))).hexdigest()
64 | with open(temp_fname, "w") as w:
65 | subprocess.call(cmd, shell=True, stdout=w)
66 |
67 | # Extract time
68 | output = os.popen('grep "Total Time:" {}'.format(temp_fname)).read()
69 | match = re.match(r"Total Time: (\d+(\.\d+)?) seconds", output)
70 | self.runtime = float(match.group(1))
71 |
72 | # Extract modularity score
73 | output = os.popen(
74 | 'grep "^Q\['
75 | + str(fm_param)
76 | + '\]" '
77 | + temp_fname
78 | + ' | sed -e "s/\[/ /g" | sed -e "s/\]/ /g" | awk \'{print $4}\''
79 | ).read()
80 | self.modularity = float(output)
81 | print("Modularity:", self.modularity)
82 |
83 | # Remove temporary output file
84 | os.remove(temp_fname)
85 |
86 | # Read partition vector from output file generated by FM
87 | partition_vector = np.ones(len(G.nodes), dtype=np.int32) * -1
88 | currgroup = -1
89 | with open(os.path.join(self.run_folder, rnd_number + "-fc_a.groups"), "r") as groups:
90 | for line in groups:
91 | if line.startswith("GROUP"):
92 | currgroup += 1
93 | else:
94 | partition_vector[int(line)] = currgroup
95 |
96 | assert np.sum(partition_vector == -1) == 0
97 | return partition_vector
--------------------------------------------------------------------------------
/MetaOptimize.Test/TopologyTests.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using System.Linq;
8 | using Microsoft.VisualStudio.TestTools.UnitTesting;
9 |
10 | ///
11 | /// Some basic topology tests.
12 | ///
13 | [TestClass]
14 | public class TopologyTests
15 | {
16 | ///
17 | /// Test that simple path enumeration works.
18 | ///
19 | [TestMethod]
20 | public void TestPathEnumeration()
21 | {
22 | var topology = new Topology();
23 | topology.AddNode("a");
24 | topology.AddNode("b");
25 | topology.AddNode("c");
26 | topology.AddNode("d");
27 | topology.AddEdge("a", "b", capacity: 10);
28 | topology.AddEdge("a", "c", capacity: 10);
29 | topology.AddEdge("b", "d", capacity: 10);
30 | topology.AddEdge("c", "d", capacity: 10);
31 |
32 | var paths = topology.SimplePaths("a", "d").ToArray();
33 |
34 | Assert.AreEqual(2, paths.Length);
35 |
36 | Assert.AreEqual("a", paths[0][0]);
37 | Assert.AreEqual("c", paths[0][1]);
38 | Assert.AreEqual("d", paths[0][2]);
39 |
40 | Assert.AreEqual("a", paths[1][0]);
41 | Assert.AreEqual("b", paths[1][1]);
42 | Assert.AreEqual("d", paths[1][2]);
43 | }
44 |
45 | ///
46 | /// Test that computing k shortest paths works.
47 | ///
48 | [TestMethod]
49 | public void TestKShortestPaths1()
50 | {
51 | var topology = new Topology();
52 | topology.AddNode("a");
53 | topology.AddNode("b");
54 | topology.AddNode("c");
55 | topology.AddNode("d");
56 | topology.AddEdge("a", "b", capacity: 10);
57 | topology.AddEdge("a", "c", capacity: 10);
58 | topology.AddEdge("b", "d", capacity: 10);
59 | topology.AddEdge("c", "d", capacity: 10);
60 |
61 | var paths = topology.ShortestKPaths(1, "a", "d").ToArray();
62 |
63 | Assert.AreEqual(1, paths.Length);
64 |
65 | Assert.AreEqual("a", paths[0][0]);
66 | Assert.AreEqual("b", paths[0][1]);
67 | Assert.AreEqual("d", paths[0][2]);
68 |
69 | paths = topology.ShortestKPaths(2, "a", "d").ToArray();
70 |
71 | Assert.AreEqual(2, paths.Length);
72 |
73 | Assert.AreEqual("a", paths[0][0]);
74 | Assert.AreEqual("b", paths[0][1]);
75 | Assert.AreEqual("d", paths[0][2]);
76 |
77 | Assert.AreEqual("a", paths[1][0]);
78 | Assert.AreEqual("c", paths[1][1]);
79 | Assert.AreEqual("d", paths[1][2]);
80 | }
81 |
82 | ///
83 | /// Test that computing k shortest paths works.
84 | ///
85 | [TestMethod]
86 | public void TestKShortestPaths2()
87 | {
88 | var topology = new Topology();
89 | topology.AddNode("a");
90 | topology.AddNode("b");
91 | topology.AddNode("c");
92 | topology.AddNode("d");
93 | topology.AddEdge("a", "b", capacity: 10);
94 | topology.AddEdge("a", "c", capacity: 8);
95 | topology.AddEdge("a", "d", capacity: 10);
96 | topology.AddEdge("b", "d", capacity: 10);
97 | topology.AddEdge("c", "d", capacity: 10);
98 |
99 | var paths = topology.ShortestKPaths(4, "a", "d").ToArray();
100 |
101 | Assert.AreEqual(3, paths.Length);
102 |
103 | Assert.AreEqual("a", paths[0][0]);
104 | Assert.AreEqual("d", paths[0][1]);
105 |
106 | Assert.AreEqual("a", paths[1][0]);
107 | Assert.IsTrue(("c" == paths[1][1]) || ("b" == paths[1][1]));
108 | Assert.AreEqual("d", paths[1][2]);
109 |
110 | Assert.AreEqual("a", paths[2][0]);
111 | Assert.IsTrue(("c" == paths[2][1]) || ("b" == paths[2][1]));
112 | Assert.AreEqual("d", paths[2][2]);
113 | }
114 | }
115 | }
--------------------------------------------------------------------------------
/Topologies/b4-teavar.json:
--------------------------------------------------------------------------------
1 | {
2 | "directed": true,
3 | "multigraph": false,
4 | "graph": {},
5 | "nodes": [
6 | {
7 | "id": 0
8 | },
9 | {
10 | "id": 1
11 | },
12 | {
13 | "id": 2
14 | },
15 | {
16 | "id": 4
17 | },
18 | {
19 | "id": 3
20 | },
21 | {
22 | "id": 5
23 | },
24 | {
25 | "id": 6
26 | },
27 | {
28 | "id": 7
29 | },
30 | {
31 | "id": 10
32 | },
33 | {
34 | "id": 9
35 | },
36 | {
37 | "id": 8
38 | },
39 | {
40 | "id": 11
41 | }
42 | ],
43 | "links": [
44 | {
45 | "capacity": 5000,
46 | "source": 0,
47 | "target": 1
48 | },
49 | {
50 | "capacity": 5000,
51 | "source": 0,
52 | "target": 2
53 | },
54 | {
55 | "capacity": 5000,
56 | "source": 1,
57 | "target": 0
58 | },
59 | {
60 | "capacity": 5000,
61 | "source": 1,
62 | "target": 4
63 | },
64 | {
65 | "capacity": 5000,
66 | "source": 2,
67 | "target": 0
68 | },
69 | {
70 | "capacity": 5000,
71 | "source": 2,
72 | "target": 3
73 | },
74 | {
75 | "capacity": 5000,
76 | "source": 2,
77 | "target": 5
78 | },
79 | {
80 | "capacity": 5000,
81 | "source": 4,
82 | "target": 1
83 | },
84 | {
85 | "capacity": 5000,
86 | "source": 4,
87 | "target": 3
88 | },
89 | {
90 | "capacity": 5000,
91 | "source": 4,
92 | "target": 5
93 | },
94 | {
95 | "capacity": 5000,
96 | "source": 3,
97 | "target": 2
98 | },
99 | {
100 | "capacity": 5000,
101 | "source": 3,
102 | "target": 4
103 | },
104 | {
105 | "capacity": 5000,
106 | "source": 3,
107 | "target": 6
108 | },
109 | {
110 | "capacity": 5000,
111 | "source": 3,
112 | "target": 7
113 | },
114 | {
115 | "capacity": 5000,
116 | "source": 5,
117 | "target": 2
118 | },
119 | {
120 | "capacity": 5000,
121 | "source": 5,
122 | "target": 4
123 | },
124 | {
125 | "capacity": 5000,
126 | "source": 5,
127 | "target": 6
128 | },
129 | {
130 | "capacity": 5000,
131 | "source": 5,
132 | "target": 7
133 | },
134 | {
135 | "capacity": 5000,
136 | "source": 6,
137 | "target": 3
138 | },
139 | {
140 | "capacity": 5000,
141 | "source": 6,
142 | "target": 5
143 | },
144 | {
145 | "capacity": 5000,
146 | "source": 6,
147 | "target": 7
148 | },
149 | {
150 | "capacity": 5000,
151 | "source": 6,
152 | "target": 10
153 | },
154 | {
155 | "capacity": 5000,
156 | "source": 7,
157 | "target": 3
158 | },
159 | {
160 | "capacity": 5000,
161 | "source": 7,
162 | "target": 5
163 | },
164 | {
165 | "capacity": 5000,
166 | "source": 7,
167 | "target": 6
168 | },
169 | {
170 | "capacity": 5000,
171 | "source": 7,
172 | "target": 9
173 | },
174 | {
175 | "capacity": 5000,
176 | "source": 10,
177 | "target": 6
178 | },
179 | {
180 | "capacity": 5000,
181 | "source": 10,
182 | "target": 8
183 | },
184 | {
185 | "capacity": 5000,
186 | "source": 10,
187 | "target": 9
188 | },
189 | {
190 | "capacity": 5000,
191 | "source": 10,
192 | "target": 11
193 | },
194 | {
195 | "capacity": 5000,
196 | "source": 9,
197 | "target": 7
198 | },
199 | {
200 | "capacity": 5000,
201 | "source": 9,
202 | "target": 8
203 | },
204 | {
205 | "capacity": 5000,
206 | "source": 9,
207 | "target": 10
208 | },
209 | {
210 | "capacity": 5000,
211 | "source": 9,
212 | "target": 11
213 | },
214 | {
215 | "capacity": 5000,
216 | "source": 8,
217 | "target": 9
218 | },
219 | {
220 | "capacity": 5000,
221 | "source": 8,
222 | "target": 10
223 | },
224 | {
225 | "capacity": 5000,
226 | "source": 11,
227 | "target": 9
228 | },
229 | {
230 | "capacity": 5000,
231 | "source": 11,
232 | "target": 10
233 | }
234 | ]
235 | }
--------------------------------------------------------------------------------
/MetaOptimize/GurobiBinary.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using Gurobi;
6 |
7 | namespace MetaOptimize
8 | {
9 | ///
10 | /// Implements a variant of the Gurobi solver that encodes OR constraints with big-M binary variables.
11 | ///
12 | public class GurobiBinary : GurobiSOS
13 | {
14 | private double _bigM = Math.Pow(10, 8);
15 | private double _tolerance = Math.Pow(10, -8);
16 | private double _scale = Math.Pow(10, -5);
17 |
18 | ///
19 | /// Scales a polynomial.
20 | ///
21 | ///
22 | public Polynomial<GRBVar> scale(Polynomial<GRBVar> poly)
23 | {
24 | foreach (var term in poly.GetTerms())
25 | {
26 | term.Coefficient *= this._scale;
27 | }
28 | return poly;
29 | }
30 |
31 | ///
32 | /// Constructor.
33 | ///
34 | public GurobiBinary(double timeout = double.PositiveInfinity, int verbose = 0, int numThreads = 0, double timeToTerminateNoImprovement = -1,
35 | bool recordProgress = false, string logPath = null) : base(timeout, verbose, numThreads, timeToTerminateNoImprovement, recordProgress, logPath)
36 | {
37 | }
38 |
39 | ///
40 | /// Wrapper that converts the new types to Gurobi types and then
41 | /// calls the proper function.
42 | ///
43 | ///
44 | ///
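/// Encodes the disjunction "polynomial1 <= 0 OR polynomial2 == 0" with one binary
/// variable alpha and a big-M: polynomial1 - M*alpha <= 0 enforces the first branch
/// when alpha = 0, while +/-polynomial2 <= M*(1 - alpha) forces polynomial2 to zero
/// when alpha = 1. Both polynomials are scaled first, so the effective big-M is
/// _bigM * _scale.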
45 | public override void AddOrEqZeroConstraint(Polynomial<GRBVar> polynomial1, Polynomial<GRBVar> polynomial2)
46 | {
47 | GRBLinExpr poly1 = this.fromPolyToLinExpr(this.scale(polynomial1));
48 | GRBLinExpr poly2 = this.fromPolyToLinExpr(this.scale(polynomial2));
49 | GRBLinExpr poly2Neg = this.fromPolyToLinExpr(polynomial2.Negate());
50 |
51 | var alpha = this._model.AddVar(0.0, 1.0, 0.0, GRB.BINARY, "binary_" + this._auxiliaryVars.Count);
52 | this._auxiliaryVars.Add($"binary_{this._auxiliaryVars.Count}", alpha);
53 |
54 | poly1.AddTerm(-1 * this._bigM * this._scale, alpha);
55 |
56 | poly2.AddTerm(this._bigM * this._scale, alpha);
57 | poly2.AddConstant(-1 * this._bigM * this._scale);
58 |
59 | poly2Neg.AddTerm(this._bigM * this._scale, alpha);
60 | poly2Neg.AddConstant(-1 * this._bigM * this._scale);
61 |
62 | this._model.AddConstr(poly1, GRB.LESS_EQUAL, 0.0, "ineq_index_" + this._constraintIneqCount++);
63 | this._model.AddConstr(poly2, GRB.LESS_EQUAL, 0.0, "ineq_index_" + this._constraintIneqCount++);
64 | this._model.AddConstr(poly2Neg, GRB.LESS_EQUAL, 0.0, "ineq_index_" + this._constraintIneqCount++);
65 | }
66 | ///
67 | /// Maximize the objective.
68 | ///
69 | /// A solution.
70 | public override GRBModel Maximize()
71 | {
72 | Console.WriteLine("in maximize call");
73 | GRBLinExpr objective = 0;
74 | foreach (var auxVar in this.auxPolyList)
75 | {
76 | objective += this.fromPolyToLinExpr(auxVar) * (1 / this._bigM);
77 | }
78 | this._model.SetObjective(objective + this._objective, GRB.MAXIMIZE);
79 | // this._model.Parameters.DualReductions = 0;
80 | // this._model.Parameters.MIPFocus = 3;
81 | // this._model.Parameters.Cuts = 3;
82 | // this._model.Parameters.Heuristics = 0.5;
83 |
84 | this._model.Set(GRB.DoubleParam.IntFeasTol, this._tolerance);
85 |
86 | // string exhaust_dir_name = @"c:\tmp\grbsos_exhaust\rand_" + (new Random()).Next(1000000) + @"\";
87 | // Directory.CreateDirectory(exhaust_dir_name);
88 | // this._model.Write($"{exhaust_dir_name}\\model_" + DateTime.Now.Millisecond + ".lp");
89 | this._model.Optimize();
90 | if (this._model.Status != GRB.Status.TIME_LIMIT && this._model.Status != GRB.Status.OPTIMAL && this._model.Status != GRB.Status.INTERRUPTED)
91 | {
92 | throw new Exception($"model not optimal {ModelStatusToString(this._model.Status)}");
93 | }
94 |
95 | return this._model;
96 | }
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/Topologies/partition_network.py:
--------------------------------------------------------------------------------
2 | import itertools
3 | import parse_and_convert_graphml
4 | from partitioning.spectral_clustering import SpectralClustering
5 | from partitioning.fm_partitioning import FMPartitioning
6 | from partitioning.leader_election import LeaderElection
7 | from partitioning.leader_election_uniform import LeaderElectionUniform
8 | import numpy as np
9 | import networkx as nx
10 | from collections import defaultdict
11 | import os
12 |
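# Driver script: for every (partitioning method, partition count, topology) combination,
# partition the graph, dump each cluster as JSON, and record how many of the k-shortest
# paths between node pairs of the same cluster leave that cluster.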
13 | topo_name_list = [
14 | # ("GtsCe", 1),
15 | ("Cogentco", 0),
16 | # ("Uninett2010", 0)
17 | # ("Kdl", 1),
18 | # ("b4-teavar", 0),
19 | # ("ring_200", 0),
20 | # ("ring_400", 0),
21 | # ("SWANTETopology", 0)
22 | ]
23 | num_partitions_list = [
24 | 2,
25 | # 3,
26 | 4,
27 | # 5,
28 | # 6,
29 | # 8,
30 | # 10,
31 | # 12,
32 | 16,
33 | # 15,
34 | # 20,
35 | # 20,
36 | # 25,
37 | # 50,
38 | # 100,
39 | ]
40 | num_shortest_paths_list = [
41 | 2,
42 | 4,
43 | 10,
44 | 16,
45 | ]
46 |
47 | log_dir = "./partition_log/{}_{}_{}/"
48 |
49 | # partitioning_method = SpectralClustering
50 | partitioning_method_list = [
51 | FMPartitioning,
52 | SpectralClustering,
53 | # LeaderElection,
54 | # LeaderElectionUniform
55 | ]
56 |
57 | def k_shortest_paths(G, source, target, k, weight=None):
58 | return list(
59 | itertools.islice(nx.shortest_simple_paths(G, source, target, weight=weight), k)
60 | )
61 |
62 |
63 | for partitioning_method in partitioning_method_list:
64 | for num_partitions in num_partitions_list:
65 | for topo_name, is_topo_zoo in topo_name_list:
66 | if is_topo_zoo:
67 | fname = f'../../../ncflow/topologies/topology-zoo/{topo_name}.graphml'
68 | G = parse_and_convert_graphml.read_graph_graphml(fname)
69 | else:
70 | fname = f'{topo_name}.json'
71 | G = parse_and_convert_graphml.read_graph_json(fname)
72 | partition_obj = partitioning_method(num_partitions=num_partitions)
73 | partition_vector = partition_obj.partition(G, topo_name)
74 | print(topo_name, partition_vector, partition_obj.name)
75 | folder_path = log_dir.format(topo_name, num_partitions, partition_obj.name)
76 | if not os.path.isdir(folder_path):
77 | os.makedirs(folder_path)
78 | total_edges = len(G.edges())
79 | subgraph_edges = 0
80 | subgraph_nodes = []
81 | num_intra_cluster_paths_dict = defaultdict(int)
82 | num_total_paths_dict = defaultdict(int)
83 | for pid in np.unique(partition_vector):
84 | nodes = np.argwhere(partition_vector == pid).flatten().tolist()
85 | subgraph_g = G.subgraph(nodes)
86 | subgraph_nodes.append((len(subgraph_g.nodes()), len(subgraph_g.edges())))
87 | subgraph_edges += len(subgraph_g.edges())
88 | # print(subgraph_g.edges())
89 | parse_and_convert_graphml.write_graph_json(subgraph_g, folder_path + f"/cluster_{pid}.json")
90 |
91 | for num_shortest_paths in num_shortest_paths_list:
92 | for (node1, node2) in itertools.combinations(subgraph_g.nodes(), 2):
93 | paths = k_shortest_paths(G, node1, node2, num_shortest_paths)
94 | # print(paths)
95 | for s_path in paths:
96 | partitions = np.unique(partition_vector[s_path])
97 | if len(partitions) > 1:
98 | # print(partitions)
99 | num_intra_cluster_paths_dict[num_shortest_paths] += 1
100 | num_total_paths_dict[num_shortest_paths] += 1
101 |
102 | log_path = folder_path + f"/detail.txt"
103 | with open(log_path, "w") as fp:
104 | fp.writelines(f"num total edges {total_edges} num subgraph edges {subgraph_edges} num inter-cluster edges {total_edges - subgraph_edges}\n")
105 | fp.writelines(f'complete graph: nodes {len(G.nodes())} edges {len(G.edges())}\n')
106 | for sp in num_shortest_paths_list:
107 | fp.writelines(f'{sp}-shortest paths num intra cluster paths {num_intra_cluster_paths_dict[sp]}, num total paths {num_total_paths_dict[sp]} frac {num_intra_cluster_paths_dict[sp] / num_total_paths_dict[sp]}\n')
108 | for pid, (node, edge) in enumerate(subgraph_nodes):
109 | fp.writelines(f"cluster {pid}: nodes {node} edges {edge}\n")
110 |
111 |
--------------------------------------------------------------------------------
/MetaOptimize/VectorBinPacking/Bins.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using System.Collections.Concurrent;
9 | using System.Collections.Generic;
10 | using System.Collections.ObjectModel;
11 | using System.Diagnostics;
12 | using System.Linq;
13 |
14 | ///
15 | /// A simple bin class that contains the bin sizes.
16 | ///
17 | public class Bins
18 | {
19 | private List<List<double>> binSizeList;
20 |
21 | ///
22 | /// Creates a new instance of the class.
23 | /// Captures the bins for VBP. Use this constructor when all bins have
24 | /// the same size: you only need to specify the
25 | /// number of bins and the (per-dimension)
26 | /// size shared by every bin.
27 | ///
28 | public Bins(int numBins, List<double> binSize)
29 | {
30 | this.binSizeList = new List<List<double>>();
31 | for (int i = 0; i < numBins; i++) {
32 | this.binSizeList.Add(new List<double>(binSize));
33 | }
34 | }
35 |
36 | ///
37 | /// Creates a new instance of the class.
38 | /// Use this function if the bins you want to use have different sizes.
39 | /// The input here will have to specify the size of each bin individually.
40 | ///
41 | public Bins(List<List<double>> binList)
42 | {
43 | this.binSizeList = new List<List<double>>();
44 | foreach (var binSize in binList) {
45 | this.binSizeList.Add(binSize.ToList());
46 | }
47 | }
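// Usage sketch (illustrative): three identical two-dimensional bins of size (10, 4),
// versus two bins with different sizes.
//
//   var uniformBins = new Bins(3, new List<double> { 10, 4 });
//   var mixedBins = new Bins(new List<List<double>> { new List<double> { 10, 4 }, new List<double> { 6, 6 } });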
48 |
49 | ///
50 | /// Get list of bin sizes.
51 | ///
52 | public ReadOnlyCollection<ReadOnlyCollection<double>> getBinSizes()
53 | {
54 | var binList = new List<ReadOnlyCollection<double>>();
55 | foreach (var binSize in this.binSizeList) {
56 | binList.Add(binSize.AsReadOnly());
57 | }
58 | return binList.AsReadOnly();
59 | }
60 |
61 | ///
62 | /// return number of bins.
63 | ///
64 | public int GetNum()
65 | {
66 | return this.binSizeList.Count;
67 | }
68 |
69 | ///
70 | /// return max capacity across all bins.
71 | ///
72 | public double MaxCapacity(int dim)
73 | {
74 | double maxCap = 0;
75 | foreach (var binSize in this.binSizeList) {
76 | maxCap = Math.Max(maxCap, binSize[dim]);
77 | }
78 | return maxCap;
79 | }
80 |
81 | ///
82 | /// return the sum of capacity of first $K$ bins.
83 | ///
84 | public double SumCapFirst(int k, int dim) {
85 | Debug.Assert(k <= this.binSizeList.Count);
86 | double sumCap = 0;
87 | for (int i = 0; i < k; i++) {
88 | sumCap += this.binSizeList[i][dim];
89 | }
90 | return sumCap;
91 | }
92 |
93 | ///
94 | /// return the sum of capacity of bin $k$ over all dimensions.
95 | ///
96 | public double SumOverAllDim(int k) {
97 | double sumCap = 0;
98 | foreach (var binSize in this.binSizeList[k]) {
99 | sumCap += binSize;
100 | }
101 | return sumCap;
102 | }
103 |
104 | ///
105 | /// return the sum of capacity of first $k$ bins over all dimensions.
106 | ///
107 | public double SumOverAllDimFirst(int k) {
108 | Debug.Assert(k <= this.binSizeList.Count);
109 | double sumCap = 0;
110 | for (int i = 0; i < k; i++) {
111 | sumCap += this.SumOverAllDim(i);
112 | }
113 | return sumCap;
114 | }
115 |
116 | ///
117 | /// returns a new bin object consisting of the first $k$ bins from this one.
118 | ///
119 | public Bins GetFirstKBins(int k) {
120 | Debug.Assert(k <= this.binSizeList.Count);
121 | var newBinSizeList = new List<List<double>>();
122 | for (int i = 0; i < k; i++) {
123 | newBinSizeList.Add(this.binSizeList[i]);
124 | }
125 | return new Bins(newBinSizeList);
126 | }
127 | }
128 | }
--------------------------------------------------------------------------------
/MetaOptimize.Test/PopEncodingTests.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize.Test
6 | {
7 | using System;
8 | using System.Collections.Generic;
9 | using MetaOptimize;
10 | using Microsoft.VisualStudio.TestTools.UnitTesting;
11 |
12 | ///
13 | /// Test that the pop encoding is working.
14 | ///
15 | [TestClass]
16 | public class PopEncodingTests<TVar, TSolution>
17 | {
18 | ///
19 | /// Function to create a new solver.
20 | ///
21 | internal Func<ISolver<TVar, TSolution>> CreateSolver;
22 |
23 | ///
24 | /// Test that the optimality encoder works for a topology with one edge.
25 | ///
26 | [TestMethod]
27 | public void TestPopGapSimple()
28 | {
29 | var topology = new Topology();
30 |
31 | topology.AddNode("a");
32 | topology.AddNode("b");
33 | topology.AddEdge("a", "b", capacity: 10);
34 |
35 | var partition = new Dictionary<(string, string), int>();
36 | partition.Add(("a", "b"), 0);
37 | partition.Add(("b", "a"), 1);
38 | var popEncoder = new PopEncoder<TVar, TSolution>(CreateSolver(), maxNumPaths: 1, numPartitions: 2, demandPartitions: partition);
39 | var encoding = popEncoder.Encoding(topology);
40 | var solverSolution = popEncoder.Solver.Maximize(encoding.GlobalObjective);
41 | var optimizationSolution = (TEMaxFlowOptimizationSolution)popEncoder.GetSolution(solverSolution);
42 |
43 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(optimizationSolution, Newtonsoft.Json.Formatting.Indented));
44 |
45 | // Use approximate comparisons: Assert.AreEqual on raw doubles can fail due to floating-point imprecision.
46 | Assert.IsTrue(Utils.IsApproximately(5, optimizationSolution.MaxObjective));
47 | Assert.IsTrue(5 <= optimizationSolution.Demands[("a", "b")]);
48 | Assert.IsTrue(Utils.IsApproximately(5, optimizationSolution.Flows[("a", "b")]));
49 | Assert.IsTrue(0 <= optimizationSolution.Demands[("b", "a")]);
50 | Assert.AreEqual(0, optimizationSolution.Flows[("b", "a")]);
51 | }
52 |
53 | /// <summary>
54 | /// Test the POP encoder on a more complex example.
55 | /// </summary>
56 | /// TODO: document that, for the adversarial input generator to work,
57 | /// the heuristic and the optimal encoders must use the same solver instance.
58 | [TestMethod]
59 | public void TestPopGapSK()
60 | {
61 | var topology = new Topology();
62 | topology.AddNode("a");
63 | topology.AddNode("b");
64 | topology.AddNode("c");
65 | topology.AddNode("d");
66 | topology.AddEdge("a", "b", capacity: 10);
67 | topology.AddEdge("a", "c", capacity: 10);
68 | topology.AddEdge("b", "d", capacity: 10);
69 | topology.AddEdge("c", "d", capacity: 10);
70 |
71 | var partition = topology.RandomPartition(2);
72 | // create the optimal encoder.
73 | var solver = CreateSolver();
74 | var optimalEncoder = new TEMaxFlowOptimalEncoder<TVar, TSolution>(solver, maxNumPaths: 1);
75 |
76 | var popEncoderG = new PopEncoder<TVar, TSolution>(solver, maxNumPaths: 1, numPartitions: 2, demandPartitions: partition);
77 | var adversarialInputGenerator = new TEAdversarialInputGenerator<TVar, TSolution>(topology, maxNumPaths: 1);
78 |
79 | var (optimalSolutionG, popSolutionG) = adversarialInputGenerator.MaximizeOptimalityGap(optimalEncoder, popEncoderG);
80 | Console.WriteLine("Optimal:");
81 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(optimalSolutionG, Newtonsoft.Json.Formatting.Indented));
82 | Console.WriteLine("****");
83 | Console.WriteLine("Heuristic:");
84 | Console.WriteLine(Newtonsoft.Json.JsonConvert.SerializeObject(popSolutionG, Newtonsoft.Json.Formatting.Indented));
85 | Console.WriteLine("****");
86 |
87 | var optimal = ((TEMaxFlowOptimizationSolution)optimalSolutionG).MaxObjective;
88 | var heuristic = ((TEMaxFlowOptimizationSolution)popSolutionG).MaxObjective;
89 | Assert.IsTrue(Math.Abs(optimal - 40.0) < 0.01, $"Optimal is {optimal} != 40");
90 | Assert.IsTrue(Math.Abs(heuristic - 20.0) < 0.01, $"Heuristic is {heuristic} != 20");
91 |
92 | Console.WriteLine($"optimalG={optimal}, heuristicG={heuristic}");
93 | }
94 | }
95 | }
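
The repository binds these generic tests to concrete solvers in files such as PopEncodingTestsGurobi.cs and PopEncodingTestsORTools.cs. A hypothetical sketch of that pattern, placed in the MetaOptimize.Test namespace, is below; the type arguments (GRBVar, GRBModel), the GurobiSOS solver class, and its parameterless constructor are assumptions, so the real derived classes may differ.

    // Hypothetical: bind the generic test fixture to a Gurobi-backed solver.
    [TestClass]
    public class PopEncodingTestsGurobiSketch : PopEncodingTests<GRBVar, GRBModel>
    {
        public PopEncodingTestsGurobiSketch()
        {
            // GurobiSOS is assumed to implement ISolver<GRBVar, GRBModel>.
            this.CreateSolver = () => new GurobiSOS();
        }
    }
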
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/DemandPinningQuantizedEncoder.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 |
7 | namespace MetaOptimize
8 | {
9 | using System;
10 | using System.Collections.Generic;
11 | using System.Linq;
12 | using NLog;
13 | using ZenLib;
14 | /// <summary>
15 | /// Encodes the quantized demand pinning heuristic.
16 | /// </summary>
17 | public class DemandPinningQuantizedEncoder<TVar, TSolution> : DemandPinningEncoder<TVar, TSolution>
18 | {
19 | private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
20 | /// <summary>
21 | /// Auxiliary variable used to encode DP.
22 | /// </summary>
23 | private Dictionary<(string, string), Polynomial<TVar>> SPLowerBound { get; set; }
24 |
25 | /// <summary>
26 | /// Create a new instance of the class.
27 | /// </summary>
28 | /// <param name="solver">The solver.</param>
29 | /// <param name="k">The max number of paths between nodes.</param>
30 | /// <param name="threshold">The threshold to use for demand pinning.</param>
31 | /// <param name="scaleFactor">The factor by which the input is downscaled.</param>
32 | public DemandPinningQuantizedEncoder(ISolver<TVar, TSolution> solver, int k, double threshold = 0,
33 | double scaleFactor = 1.0) : base(solver, k, threshold, scaleFactor)
34 | {
35 | }
36 |
37 | /// <summary>
38 | /// Create auxiliary variables to model max() in the DP formulation.
39 | /// </summary>
40 | protected override void CreateAuxVariable()
41 | {
42 | this.SPLowerBound = new Dictionary<(string, string), Polynomial<TVar>>();
43 | foreach (var pair in this.Topology.GetNodePairs())
44 | {
45 | if (!IsDemandValid(pair))
46 | {
47 | continue;
48 | }
49 | this.SPLowerBound[pair] = this.DemandVariables[pair].GetTermsWithCoeffLeq(this.Threshold);
50 | }
51 | }
52 |
53 | /// <summary>
54 | /// Verify the output.
55 | /// </summary>
56 | protected override void VerifyOutput(TSolution solution, Dictionary<(string, string), double> demands, Dictionary<(string, string), double> flows)
57 | {
58 | foreach (var (pair, demand) in demands)
59 | {
60 | if (!flows.ContainsKey(pair))
61 | {
62 | continue;
63 | }
64 | if (demand <= this.Threshold && Math.Abs(flows[pair] - demand) > 0.001)
65 | {
66 | Console.WriteLine($"error: src {pair.Item1}, dst {pair.Item2}, demand {demand}, flow {flows[pair]}");
67 | throw new Exception("Demand below the threshold is not routed fully through the shortest path.");
68 | }
69 | bool found = false;
70 | if (demand <= 0.001)
71 | {
72 | found = true;
73 | }
74 | else
75 | {
76 | foreach (var demandlvl in this.DemandVariables[pair].GetTerms())
77 | {
78 | if (Math.Abs(demand - demandlvl.Coefficient) <= 0.001)
79 | {
80 | found = true;
81 | }
82 | }
83 | }
84 | if (!found)
85 | {
86 | Console.WriteLine($"error: src {pair.Item1}, dst {pair.Item2}, demand {demand}, flow {flows[pair]}");
87 | throw new Exception("Demand does not match prespecified levels. Please check solver's precision.");
88 | }
89 | }
90 | }
91 |
92 | /// <summary>
93 | /// Add the demand pinning constraints.
94 | /// </summary>
95 | protected override void GenerateDPConstraints(Polynomial objectiveFunction)
96 | {
97 | // generating the max constraints that achieve pinning.
98 | Logger.Info("Generating Quantized DP constraints.");
99 | foreach (var (pair, polyTerm) in sumNonShortestDict)
100 | {
101 | // shortest-path flow >= the sum of quantized demand terms with coefficient <= threshold.
102 | var shortestPathUB = this.SPLowerBound[pair].Copy();
103 | shortestPathUB.Add(new Term<TVar>(-1, shortestFlowVariables[pair]));
104 | this.innerProblemEncoder.AddLeqZeroConstraint(shortestPathUB);
105 | }
106 | }
107 | }
108 | }
109 |
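
A reading of GenerateDPConstraints above, under the assumption that each quantized demand is encoded as a sum of levels q with binary selectors x_{p,q} (so d_p = \sum_q q \, x_{p,q}): for every valid pair p the encoder adds the inequality

    \sum_{q \le T} q \, x_{p,q} - f^{sp}_p \le 0,

where T is the pinning threshold and f^{sp}_p is the flow on p's shortest path. In words, whenever the chosen demand level is at most the threshold, the shortest path must carry all of it, which is exactly the behavior VerifyOutput checks.
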
--------------------------------------------------------------------------------
/Topologies/outputs/paths/b4-teavar_2.json:
--------------------------------------------------------------------------------
1 | {"2":{"0_9":[["0","2","3","7","9"],["0","2","5","7","9"]],"2_3":[["2","3"],["2","5","4","3"]],"1_7":[["1","4","3","7"],["1","4","5","7"]],"8_11":[["8","9","11"],["8","10","11"]],"11_2":[["11","9","7","3","2"],["11","10","6","3","2"]],"1_11":[["1","4","3","6","10","11"],["1","4","5","6","10","11"]],"2_7":[["2","3","7"],["2","5","7"]],"5_3":[["5","2","3"],["5","4","3"]],"6_1":[["6","3","4","1"],["6","5","4","1"]],"6_9":[["6","10","9"],["6","7","9"]],"3_7":[["3","7"],["3","6","7"]],"9_11":[["9","11"],["9","10","11"]],"4_0":[["4","1","0"],["4","3","2","0"]],"11_7":[["11","9","7"],["11","10","6","7"]],"4_11":[["4","3","7","9","11"],["4","5","7","9","11"]],"8_9":[["8","9"],["8","10","9"]],"10_2":[["10","6","3","2"],["10","6","5","2"]],"1_5":[["1","4","5"],["1","0","2","5"]],"4_8":[["4","3","7","9","8"],["4","5","7","9","8"]],"2_5":[["2","5"],["2","3","4","5"]],"6_8":[["6","10","8"],["6","7","9","8"]],"9_0":[["9","7","3","2","0"],["9","7","5","2","0"]],"4_6":[["4","3","6"],["4","5","6"]],"6_7":[["6","7"],["6","3","7"]],"3_9":[["3","7","9"],["3","6","7","9"]],"7_6":[["7","6"],["7","3","6"]],"7_8":[["7","9","8"],["7","6","10","8"]],"7_3":[["7","3"],["7","6","3"]],"5_11":[["5","7","9","11"],["5","6","10","11"]],"2_0":[["2","0"],["2","3","4","1","0"]],"11_6":[["11","10","6"],["11","9","7","6"]],"9_10":[["9","10"],["9","8","10"]],"8_3":[["8","9","7","3"],["8","10","6","3"]],"9_8":[["9","8"],["9","10","8"]],"10_0":[["10","6","3","2","0"],["10","6","5","2","0"]],"10_5":[["10","6","5"],["10","9","7","5"]],"8_0":[["8","9","7","3","2","0"],["8","10","6","3","2","0"]],"8_6":[["8","10","6"],["8","9","7","6"]],"10_3":[["10","6","3"],["10","9","7","3"]],"7_0":[["7","3","2","0"],["7","5","2","0"]],"4_1":[["4","1"],["4","3","2","0","1"]],"5_2":[["5","2"],["5","4","3","2"]],"10_7":[["10","6","7"],["10","9","7"]],"11_9":[["11","9"],["11","10","9"]],"2_1":[["2","0","1"],["2","3","4","1"]],"3_0":[["3","2","0"],["3","4","1","0"]],"5_10":[["5","6","10"],["5","7","6","10"]],"3_8":[["3","7","9","8"],["3","6","10","8"]],"0_11":[["0","2","3","6","10","11"],["0","2","5","6","10","11"]],"10_1":[["10","6","3","4","1"],["10","6","5","4","1"]],"0_5":[["0","2","5"],["0","1","4","5"]],"0_4":[["0","1","4"],["0","2","3","4"]],"1_8":[["1","4","3","6","10","8"],["1","4","5","6","10","8"]],"5_7":[["5","7"],["5","6","7"]],"5_4":[["5","4"],["5","2","3","4"]],"6_5":[["6","5"],["6","7","5"]],"1_9":[["1","4","3","7","9"],["1","4","5","7","9"]],"3_5":[["3","2","5"],["3","4","5"]],"5_1":[["5","4","1"],["5","2","0","1"]],"2_9":[["2","3","7","9"],["2","5","7","9"]],"9_2":[["9","7","3","2"],["9","7","5","2"]],"0_7":[["0","2","3","7"],["0","2","5","7"]],"5_8":[["5","7","9","8"],["5","6","10","8"]],"6_10":[["6","10"],["6","7","9","10"]],"6_0":[["6","3","2","0"],["6","5","2","0"]],"5_0":[["5","2","0"],["5","4","1","0"]],"11_1":[["11","9","7","3","4","1"],["11","10","6","3","4","1"]],"8_7":[["8","9","7"],["8","10","6","7"]],"3_6":[["3","6"],["3","7","6"]],"8_5":[["8","9","7","5"],["8","10","6","5"]],"0_10":[["0","2","3","6","10"],["0","2","5","6","10"]],"10_6":[["10","6"],["10","9","7","6"]],"8_4":[["8","9","7","3","4"],["8","10","6","3","4"]],"7_9":[["7","9"],["7","6","10","9"]],"10_8":[["10","8"],["10","9","8"]],"7_2":[["7","3","2"],["7","5","2"]],"3_10":[["3","6","10"],["3","7","6","10"]],"11_8":[["11","9","8"],["11","10","8"]],"5_6":[["5","6"],["5","7","6"]],"7_1":[["7","3","4","1"],["7","5","4","1"]],"10_4":[["10","6","3","4"],["10","6","5","4"]],"6_3":[["6","3"],["6","7","3"]],"11_0":[["11","9","7","3","2","0"],["11","10","6","3","2","0"]
],"9_6":[["9","7","6"],["9","10","6"]],"1_0":[["1","0"],["1","4","3","2","0"]],"9_1":[["9","7","3","4","1"],["9","7","5","4","1"]],"0_6":[["0","2","3","6"],["0","2","5","6"]],"0_8":[["0","2","3","6","10","8"],["0","2","5","6","10","8"]],"10_9":[["10","9"],["10","8","9"]],"1_3":[["1","4","3"],["1","0","2","3"]],"3_11":[["3","7","9","11"],["3","6","10","11"]],"7_10":[["7","9","10"],["7","6","10"]],"8_1":[["8","9","7","3","4","1"],["8","10","6","3","4","1"]],"4_2":[["4","3","2"],["4","5","2"]],"0_2":[["0","2"],["0","1","4","3","2"]],"11_3":[["11","9","7","3"],["11","10","6","3"]],"9_3":[["9","7","3"],["9","10","6","3"]],"3_4":[["3","4"],["3","2","5","4"]],"5_9":[["5","7","9"],["5","6","7","9"]],"9_7":[["9","7"],["9","10","6","7"]],"1_4":[["1","4"],["1","0","2","3","4"]],"8_2":[["8","9","7","3","2"],["8","10","6","3","2"]],"2_6":[["2","3","6"],["2","5","6"]],"11_5":[["11","9","7","5"],["11","10","6","5"]],"8_10":[["8","10"],["8","9","10"]],"9_4":[["9","7","3","4"],["9","7","5","4"]],"6_4":[["6","3","4"],["6","5","4"]],"1_6":[["1","4","3","6"],["1","4","5","6"]],"4_7":[["4","3","7"],["4","5","7"]],"7_11":[["7","9","11"],["7","6","10","11"]],"3_1":[["3","4","1"],["3","2","0","1"]],"10_11":[["10","11"],["10","9","11"]],"4_3":[["4","3"],["4","5","2","3"]],"4_9":[["4","3","7","9"],["4","5","7","9"]],"2_11":[["2","3","7","9","11"],["2","5","7","9","11"]],"4_5":[["4","5"],["4","3","2","5"]],"6_2":[["6","3","2"],["6","5","2"]],"2_10":[["2","3","6","10"],["2","5","6","10"]],"7_5":[["7","5"],["7","6","5"]],"6_11":[["6","10","11"],["6","7","9","11"]],"1_2":[["1","0","2"],["1","4","3","2"]],"3_2":[["3","2"],["3","4","5","2"]],"0_3":[["0","2","3"],["0","1","4","3"]],"0_1":[["0","1"],["0","2","3","4","1"]],"1_10":[["1","4","3","6","10"],["1","4","5","6","10"]],"9_5":[["9","7","5"],["9","10","6","5"]],"7_4":[["7","3","4"],["7","5","4"]],"2_8":[["2","3","7","9","8"],["2","5","7","9","8"]],"2_4":[["2","3","4"],["2","5","4"]],"11_10":[["11","10"],["11","9","10"]],"11_4":[["11","9","7","3","4"],["11","10","6","3","4"]],"4_10":[["4","3","6","10"],["4","5","6","10"]]}}
--------------------------------------------------------------------------------
/MetaOptimize/GurobiCallback.cs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Microsoft. All rights reserved.
3 | //
4 |
5 | namespace MetaOptimize
6 | {
7 | using System;
8 | using Gurobi;
9 | class GurobiCallback : GRBCallback
10 | {
11 | // Flags and helper callbacks for the optional progress logging, no-improvement termination, and timeout behaviors.
12 | private bool storeProgressEnabled = false;
13 | private GurobiStoreProgressCallback storeProgressCallback = null;
14 | private bool terminationCallbackEnabled = false;
15 | private GurobiTerminationCallback terminationCallback = null;
16 | private bool timeoutCallbackEnabled = false;
17 | private GurobiTimeoutCallback timeoutCallback = null;
18 | double presolvetime_ms = -1;
19 |
20 | // Builds a composite Gurobi callback: optionally stores incumbent progress to dirname/filename, terminates after terminateNoImprovement_ms without objective improvement, and enforces a timeout.
21 | public GurobiCallback(
22 | GRBModel model,
23 | bool storeProgress = false,
24 | String dirname = null,
25 | String filename = null,
26 | double terminateNoImprovement_ms = -1,
27 | double timeout = 0)
28 | {
29 | if (storeProgress)
30 | {
31 | this.storeProgressEnabled = true;
32 | this.storeProgressCallback = new GurobiStoreProgressCallback(model, dirname, filename);
33 | }
34 | if (terminateNoImprovement_ms > 0)
35 | {
36 | this.terminationCallbackEnabled = true;
37 | this.terminationCallback = new GurobiTerminationCallback(model, terminateNoImprovement_ms);
38 | }
39 | if (timeout > 0)
40 | {
41 | this.timeoutCallbackEnabled = true;
42 | this.timeoutCallback = new GurobiTimeoutCallback(model, timeout);
43 | }
44 | }
45 |
46 | // Invoked by Gurobi at each callback point ("where"): records presolve time, reports MIP incumbent progress, and checks the termination and timeout conditions.
47 | protected override void Callback()
48 | {
49 | try
50 | {
51 | // Utils.AppendToFile(@"../logs/logs.txt", " where " + where);
52 | if (where == GRB.Callback.PRESOLVE)
53 | {
54 | this.presolvetime_ms = GetDoubleInfo(GRB.Callback.RUNTIME) * 1000;
55 | // Utils.AppendToFile(@"../logs/logs.txt", "measured presolve timer = " + presolvetime_ms);
56 | }
57 | else if (where == GRB.Callback.MESSAGE)
58 | {
59 | // nothing to do.
60 | }
61 | else
62 | {
63 | if (where == GRB.Callback.MIP || where == GRB.Callback.MIPSOL)
64 | {
65 | // MIP_OBJBST gives the current best objective; MIPSOL_OBJ gives the new incumbent's objective.
66 | // TODO: cleanup and remove commented code.
67 | double obj = -1;
68 | if (where == GRB.Callback.MIP)
69 | {
70 | obj = GetDoubleInfo(GRB.Callback.MIP_OBJBST);
71 | }
72 | else
73 | {
74 | obj = GetDoubleInfo(GRB.Callback.MIPSOL_OBJ);
75 | }
76 | var currtime_ms = GetDoubleInfo(GRB.Callback.RUNTIME) * 1000;
77 | // Utils.AppendToFile(@"../logs/logs.txt", "measured time = " + currtime_ms + " obj = " + obj);
78 | if (this.storeProgressEnabled)
79 | {
80 | this.storeProgressCallback.CallCallback(obj, currtime_ms, presolvetime_ms);
81 | }
82 | if (this.terminationCallbackEnabled)
83 | {
84 | this.terminationCallback.CallCallback(obj);
85 | }
86 | }
87 | if (this.timeoutCallbackEnabled)
88 | {
89 | this.timeoutCallback.CallCallback(where, presolvetime_ms,
90 | storeLastIfTerminated: storeProgressEnabled, storeProgressCallback: storeProgressCallback);
91 | }
92 | }
93 | }
94 | catch (GRBException e)
95 | {
96 | Console.WriteLine("Error code: " + e.ErrorCode);
97 | Console.WriteLine(e.Message);
98 | Console.WriteLine(e.StackTrace);
99 | }
100 | catch (Exception e)
101 | {
102 | Console.WriteLine("Error during callback");
103 | Console.WriteLine(e.StackTrace);
104 | }
105 | }
106 |
107 | private void ResetTermination()
108 | {
109 | if (this.terminationCallbackEnabled)
110 | {
111 | this.terminationCallback.ResetTermination();
112 | }
113 | }
114 |
115 | private void ResetProgressTimer()
116 | {
117 | if (this.storeProgressEnabled)
118 | {
119 | this.storeProgressCallback.ResetProgressTimer();
120 | }
121 | }
122 |
123 | private void ResetTimeout()
124 | {
125 | if (this.timeoutCallbackEnabled)
126 | {
127 | this.timeoutCallback.ResetTermination();
128 | }
129 | }
130 |
131 | public void ResetAll()
132 | {
133 | this.presolvetime_ms = 0;
134 | this.ResetProgressTimer();
135 | this.ResetTermination();
136 | this.ResetTimeout();
137 | }
138 |
139 | public void AppendToStoreProgressFile(double time_ms, double gap)
140 | {
141 | if (storeProgressEnabled)
142 | {
143 | this.storeProgressCallback.AppendToStoreProgressFile(time_ms, gap);
144 | }
145 | }
146 | }
147 | }
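
A hypothetical wiring sketch for the callback above (the repository's Gurobi solver classes are expected to do this internally, and the directory and file names here are illustrative); it relies only on the standard Gurobi .NET API.

    using Gurobi;

    var env = new GRBEnv();
    var model = new GRBModel(env);
    // ... add variables and constraints to the model ...

    var cb = new GurobiCallback(
        model,
        storeProgress: true,
        dirname: "logs",
        filename: "progress.csv",
        terminateNoImprovement_ms: 60000);
    model.SetCallback(cb);
    model.Optimize();
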
--------------------------------------------------------------------------------
/MetaOptimize/TrafficEngineering/ModifiedDemandPinningQuantizedEncoder.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 | using System.Threading.Tasks;
6 | using NLog;
7 |
8 | namespace MetaOptimize
9 | {
10 | using System;
11 | using System.Collections.Generic;
12 | using System.Linq;
13 | using ZenLib;
14 | /// <summary>
15 | /// Encodes a modified quantized demand pinning heuristic that only pins node pairs with short enough shortest paths.
16 | /// </summary>
17 | public class ModifiedDemandPinningQuantizedEncoder<TVar, TSolution> : DemandPinningQuantizedEncoder<TVar, TSolution>
18 | {
19 | private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
20 | /// <summary>
21 | /// Auxiliary variables used to encode DP.
22 | /// </summary>
23 | private Dictionary<(string, string), Polynomial<TVar>> SPLowerBound { get; set; }
24 | private Dictionary<(string, string), Polynomial<TVar>> NSPUpperBound { get; set; }
25 |
26 | /// <summary>
27 | /// Maximum shortest path length to pin.
28 | /// </summary>
29 | private int MaxShortestPathLen;
30 |
31 | /// <summary>
32 | /// Create a new instance of the class.
33 | /// </summary>
34 | /// <param name="solver">The solver.</param>
35 | /// <param name="k">The max number of paths between nodes.</param>
36 | /// <param name="MaxShortestPathLen">The maximum shortest path length to pin.</param>
37 | /// <param name="threshold">The threshold to use for demand pinning.</param>
38 | /// <param name="scaleFactor">The factor by which the input is downscaled.</param>
39 | public ModifiedDemandPinningQuantizedEncoder(ISolver<TVar, TSolution> solver, int k, int MaxShortestPathLen, double threshold = 0,
40 | double scaleFactor = 1.0) : base(solver, k, threshold, scaleFactor)
41 | {
42 | if (MaxShortestPathLen < 1)
43 | {
44 | throw new Exception("The max shortest path len should be >= 1 but received " + MaxShortestPathLen);
45 | }
46 | this.MaxShortestPathLen = MaxShortestPathLen;
47 | }
48 |
49 | /// <summary>
50 | /// Create auxiliary variables to model max() in the DP formulation.
51 | /// </summary>
52 | protected override void CreateAuxVariable()
53 | {
54 | this.SPLowerBound = new Dictionary<(string, string), Polynomial<TVar>>();
55 | this.NSPUpperBound = new Dictionary<(string, string), Polynomial<TVar>>();
56 | foreach (var pair in this.Topology.GetNodePairs())
57 | {
58 | if (!IsDemandValid(pair))
59 | {
60 | continue;
61 | }
62 | this.SPLowerBound[pair] = this.DemandVariables[pair].GetTermsWithCoeffLeq(this.Threshold);
63 | this.NSPUpperBound[pair] = this.DemandVariables[pair].GetTermsWithCoeffGreater(this.Threshold);
64 | }
65 | }
66 |
67 | /// <summary>
68 | /// Verify the output.
69 | /// </summary>
70 | protected override void VerifyOutput(TSolution solution, Dictionary<(string, string), double> demands, Dictionary<(string, string), double> flows)
71 | {
72 | foreach (var (pair, demand) in demands)
73 | {
74 | if (!flows.ContainsKey(pair))
75 | {
76 | continue;
77 | }
78 | var shortestPaths = this.Topology.ShortestKPaths(1, pair.Item1, pair.Item2);
79 | if (shortestPaths[0].Count() <= this.MaxShortestPathLen)
80 | {
81 | if (demand <= this.Threshold && Math.Abs(flows[pair] - demand) > 0.001)
82 | {
83 | Logger.Debug($"src {pair.Item1}, dst {pair.Item2}, demand {demand}, flow {flows[pair]}");
84 | throw new Exception("Demand below the threshold is not routed fully through the shortest path.");
85 | }
86 | }
87 | bool found = false;
88 | if (demand <= 0.001)
89 | {
90 | found = true;
91 | }
92 | else
93 | {
94 | foreach (var demandlvl in this.DemandVariables[pair].GetTerms())
95 | {
96 | if (Math.Abs(demand - demandlvl.Coefficient) <= 0.001)
97 | {
98 | found = true;
99 | }
100 | }
101 | }
102 | if (!found)
103 | {
104 | Logger.Debug($"src {pair.Item1}, dst {pair.Item2}, demand {demand}, flow {flows[pair]}");
105 | throw new Exception("Demand does not match prespecified levels. Please check solver's precision.");
106 | }
107 | }
108 | }
109 |
110 | /// <summary>
111 | /// Add the demand pinning constraints.
112 | /// </summary>
113 | protected override void GenerateDPConstraints(Polynomial objectiveFunction)
114 | {
115 | Logger.Info("Generating Modified Quantized DP constraints.");
116 | // generating the max constraints that achieve pinning.
117 | foreach (var (pair, polyTerm) in sumNonShortestDict)
118 | {
119 | var shortestPaths = this.Topology.ShortestKPaths(1, pair.Item1, pair.Item2);
120 | if (shortestPaths[0].Count() <= this.MaxShortestPathLen)
121 | {
122 | // shortest-path flow >= the sum of quantized demand terms with coefficient <= threshold.
123 | var shortestPathUB = this.SPLowerBound[pair].Copy();
124 | shortestPathUB.Add(new Term<TVar>(-1, shortestFlowVariables[pair]));
125 | this.innerProblemEncoder.AddLeqZeroConstraint(shortestPathUB);
126 | }
127 | else
128 | {
129 | // For scalability, force the demand levels <= threshold to zero for pairs whose shortest path is too long to pin.
130 | var poly = this.DemandVariables[pair].GetTermsWithCoeffLeq(this.Threshold);
131 | this.Solver.AddEqZeroConstraint(poly);
132 | }
133 | }
134 | }
135 | }
136 | }
137 |
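
A hypothetical construction sketch contrasting the two encoders (the type arguments and the GurobiSOS solver class are assumptions, and the parameter values are illustrative): the modified variant only pins pairs whose shortest path, as returned by Topology.ShortestKPaths, has length at most MaxShortestPathLen, and for longer pairs it instead forces the sub-threshold demand levels to zero.

    var solver = new GurobiSOS();   // assumed to implement ISolver<GRBVar, GRBModel>

    // Pin every demand at or below 100 units, regardless of path length.
    var dp = new DemandPinningQuantizedEncoder<GRBVar, GRBModel>(
        solver, k: 2, threshold: 100);

    // Only pin pairs with a short enough shortest path; zero out small demand levels elsewhere.
    var modifiedDp = new ModifiedDemandPinningQuantizedEncoder<GRBVar, GRBModel>(
        solver, k: 2, MaxShortestPathLen: 3, threshold: 100);
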
--------------------------------------------------------------------------------