
Commit

Keep minimum weight edge when merging parallel boundary edges in conversion from UserGraph to MatchingGraph/SearchGraph (#86)

* Ensure that the parallel edge with minimum weight is kept when merging UserGraph boundary nodes results in parallel boundary edges. Fixes #81. (A usage sketch of this behaviour follows the list below.)

* Update pm::UserGraph::to_search_graph to match the parallel boundary edge behaviour of pm::UserGraph::to_matching_graph

* Refactor by adding UserGraph::to_matching_or_search_graph_helper

* Bump stim version in ci

* Unpin ninja version in ci

* Remove matrix.python-version in ci

* Add setup-python action to build_wheels in ci

* Update cibuildwheel to v2.16.5

* Specify python-version in pip_install ci

* Add back macosx deployment target in build_wheels in ci
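For reference, here is a minimal usage sketch of the behaviour described in the first bullet, mirroring the first part of the new test added in tests/matching/decode_test.py (it uses only the public pymatching API: Matching, set_boundary_nodes, add_edge, decode):

import numpy as np
from pymatching import Matching

# Node 1 is connected to two boundary nodes (0 and 2) with different weights.
# When all boundary nodes are implicitly merged into a single virtual boundary,
# node 1 ends up with two parallel boundary edges; the fix ensures the
# minimum-weight edge (weight 2.5, fault id 1) is the one that is kept.
m = Matching()
m.set_boundary_nodes({0, 2})
m.add_edge(0, 1, fault_ids=0, weight=3.5)
m.add_edge(1, 2, fault_ids=1, weight=2.5)

# A defect on node 1 is matched to the boundary through the cheaper edge,
# so only fault id 1 appears in the correction.
assert np.array_equal(m.decode([0, 1]), np.array([0, 1], dtype=np.uint8))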
oscarhiggott authored Feb 1, 2024
1 parent c30fcce commit 7c31c23
Showing 4 changed files with 78 additions and 24 deletions.
18 changes: 9 additions & 9 deletions .github/workflows/ci.yml
@@ -34,7 +34,7 @@ jobs:
CIBW_BUILD: "${{ matrix.os_dist.dist }}"
CIBW_ARCHS_MACOS: "x86_64 universal2 arm64"
CIBW_BEFORE_BUILD: pip install --upgrade ninja
CIBW_TEST_REQUIRES: pytest stim~=1.10.dev1666411378
CIBW_TEST_REQUIRES: pytest stim
CIBW_TEST_COMMAND: pytest {project}/tests
strategy:
fail-fast: false
@@ -116,14 +116,16 @@ jobs:
- uses: actions/checkout@v3
with:
submodules: true

- uses: actions/setup-python@v4

- name: Install g++
if: runner.os == 'Linux'
run: |
sudo apt update
sudo apt install gcc-10 g++-10
- uses: pypa/cibuildwheel@v2.16.4
- uses: pypa/cibuildwheel@v2.16.5

- name: Verify clean directory
run: git diff --exit-code
@@ -179,7 +181,7 @@ jobs:
fail-fast: false
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
python-version: ["3.10"]
python-version: ["3.11"]

runs-on: ${{ matrix.platform }}

@@ -193,16 +195,16 @@ jobs:
python-version: ${{ matrix.python-version }}

- name: Add requirements
run: python -m pip install --upgrade cmake>=3.12 ninja==1.10.2.4 pytest flake8 pytest-cov
run: python -m pip install --upgrade cmake>=3.12 ninja pytest flake8 pytest-cov setuptools

- name: Build and install
run: pip install --verbose -e .
run: python -m pip install --verbose -e .

- name: Test without stim
run: python -m pytest tests

- name: Add stim
run: python -m pip install stim~=1.10.dev1666411378
run: python -m pip install stim

- name: Test with stim using coverage
run: python -m pytest tests --cov=./src/pymatching --cov-report term
@@ -218,8 +220,6 @@ jobs:
submodules: true

- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}

- name: Install pandoc
run: |
@@ -244,7 +244,7 @@
with:
python-version: '3.10'
- name: Add requirements
run: python -m pip install --upgrade cmake>=3.12 ninja==1.10.2.4 pytest flake8 pytest-cov stim~=1.10.dev1666411378
run: python -m pip install --upgrade cmake>=3.12 ninja pytest flake8 pytest-cov stim
- name: Build and install
run: pip install --verbose -e .
- name: Run tests and collect coverage
25 changes: 11 additions & 14 deletions src/pymatching/sparse_blossom/driver/user_graph.cc
@@ -245,18 +245,17 @@ double pm::UserGraph::max_abs_weight() {

pm::MatchingGraph pm::UserGraph::to_matching_graph(pm::weight_int num_distinct_weights) {
pm::MatchingGraph matching_graph(nodes.size(), _num_observables);
double normalising_constant = iter_discretized_edges(

double normalising_constant = to_matching_or_search_graph_helper(
num_distinct_weights,
[&](size_t u, size_t v, pm::signed_weight_int weight, const std::vector<size_t>& observables) {
matching_graph.add_edge(u, v, weight, observables);
},
[&](size_t u, pm::signed_weight_int weight, const std::vector<size_t>& observables) {
// Only add the boundary edge if it already isn't present. Ideally parallel edges should already have been
// merged, however we are implicitly merging all boundary nodes in this step, which could give rise to new
// parallel edges.
if (matching_graph.nodes[u].neighbors.empty() || matching_graph.nodes[u].neighbors[0])
matching_graph.add_boundary_edge(u, weight, observables);
});
matching_graph.add_boundary_edge(u, weight, observables);
}
);

matching_graph.normalising_constant = normalising_constant;
if (boundary_nodes.size() > 0) {
matching_graph.is_user_graph_boundary_node.clear();
@@ -270,18 +269,16 @@ pm::MatchingGraph pm::UserGraph::to_matching_graph(pm::weight_int num_distinct_w
pm::SearchGraph pm::UserGraph::to_search_graph(pm::weight_int num_distinct_weights) {
/// Identical to to_matching_graph but for constructing a pm::SearchGraph
pm::SearchGraph search_graph(nodes.size());
iter_discretized_edges(

to_matching_or_search_graph_helper(
num_distinct_weights,
[&](size_t u, size_t v, pm::signed_weight_int weight, const std::vector<size_t>& observables) {
search_graph.add_edge(u, v, weight, observables);
},
[&](size_t u, pm::signed_weight_int weight, const std::vector<size_t>& observables) {
// Only add the boundary edge if it already isn't present. Ideally parallel edges should already have been
// merged, however we are implicitly merging all boundary nodes in this step, which could give rise to new
// parallel edges.
if (search_graph.nodes[u].neighbors.empty() || search_graph.nodes[u].neighbors[0])
search_graph.add_boundary_edge(u, weight, observables);
});
search_graph.add_boundary_edge(u, weight, observables);
}
);
return search_graph;
}

38 changes: 37 additions & 1 deletion src/pymatching/sparse_blossom/driver/user_graph.h
@@ -99,6 +99,11 @@ class UserGraph {
pm::weight_int num_distinct_weights,
const EdgeCallable& edge_func,
const BoundaryEdgeCallable& boundary_edge_func);
template <typename EdgeCallable, typename BoundaryEdgeCallable>
double to_matching_or_search_graph_helper(
pm::weight_int num_distinct_weights,
const EdgeCallable& edge_func,
const BoundaryEdgeCallable& boundary_edge_func);
pm::MatchingGraph to_matching_graph(pm::weight_int num_distinct_weights);
pm::SearchGraph to_search_graph(pm::weight_int num_distinct_weights);
pm::Mwpm to_mwpm(pm::weight_int num_distinct_weights, bool ensure_search_graph_included);
@@ -120,7 +125,6 @@ inline double UserGraph::iter_discretized_edges(
pm::weight_int num_distinct_weights,
const EdgeCallable& edge_func,
const BoundaryEdgeCallable& boundary_edge_func) {
pm::MatchingGraph matching_graph(nodes.size(), _num_observables);
double normalising_constant = get_edge_weight_normalising_constant(num_distinct_weights);

for (auto& e : edges) {
@@ -141,6 +145,38 @@ inline double UserGraph::iter_discretized_edges(
return normalising_constant * 2;
}

template <typename EdgeCallable, typename BoundaryEdgeCallable>
inline double UserGraph::to_matching_or_search_graph_helper(
pm::weight_int num_distinct_weights,
const EdgeCallable& edge_func,
const BoundaryEdgeCallable& boundary_edge_func) {

// Use vectors to store boundary edges initially before adding them to the graph, so
// that parallel boundary edges with negative edge weights can be handled correctly
std::vector<bool> has_boundary_edge(nodes.size(), false);
std::vector<pm::signed_weight_int> boundary_edge_weights(nodes.size());
std::vector<std::vector<size_t>> boundary_edge_observables(nodes.size());

double normalising_constant = iter_discretized_edges(
num_distinct_weights,
edge_func,
[&](size_t u, pm::signed_weight_int weight, const std::vector<size_t>& observables) {
// For parallel boundary edges, keep the boundary edge with the smaller weight
if (!has_boundary_edge[u] || boundary_edge_weights[u] > weight){
boundary_edge_weights[u] = weight;
boundary_edge_observables[u] = observables;
has_boundary_edge[u] = true;
}
});

// Now add boundary edges to the graph
for (size_t i = 0; i < has_boundary_edge.size(); i++) {
if (has_boundary_edge[i])
boundary_edge_func(i, boundary_edge_weights[i], boundary_edge_observables[i]);
}
return normalising_constant;
}

UserGraph detector_error_model_to_user_graph(const stim::DetectorErrorModel& detector_error_model);

} // namespace pm
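To make the intent of to_matching_or_search_graph_helper easier to follow, here is a small standalone Python sketch of the same bookkeeping: buffer boundary-edge candidates per node, keep only the minimum-weight candidate, and emit the survivors afterwards. The function and variable names below are hypothetical illustrations, not part of the pymatching API:

def merge_parallel_boundary_edges(num_nodes, candidates, emit_boundary_edge):
    """Keep only the minimum-weight boundary edge for each node.

    candidates: iterable of (node, weight, observables) tuples, analogous to the
        boundary-edge callbacks produced by iter_discretized_edges.
    emit_boundary_edge: callback taking (node, weight, observables), analogous
        to add_boundary_edge on the matching/search graph.
    """
    # node -> (weight, observables); plays the role of the has_boundary_edge,
    # boundary_edge_weights and boundary_edge_observables vectors in the C++ helper.
    best = {}
    for node, weight, observables in candidates:
        if node not in best or weight < best[node][0]:
            best[node] = (weight, observables)
    # Boundary edges are only added once all candidates have been seen, so
    # parallel edges (including negative-weight ones) are merged correctly.
    for node in range(num_nodes):
        if node in best:
            weight, observables = best[node]
            emit_boundary_edge(node, weight, observables)

Feeding this sketch the candidates (1, 3.5, [0]) and (1, 2.5, [1]) emits a single boundary edge of weight 2.5 with observable 1 for node 1, which is the behaviour the new test exercises.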
21 changes: 21 additions & 0 deletions tests/matching/decode_test.py
@@ -276,3 +276,24 @@ def test_decode_to_edges():
m.add_edge(i, i + 1)
edges = m.decode_to_edges_array([0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0])
assert np.array_equal(edges, np.array([[9, 8], [5, 6], [4, 3], [5, 4], [0, 1], [0, -1]], dtype=np.int64))


def test_parallel_boundary_edges_decoding():
m = Matching()
m.set_boundary_nodes({0, 2})
m.add_edge(0, 1, fault_ids=0, weight=3.5)
m.add_edge(1, 2, fault_ids=1, weight=2.5)
assert np.array_equal(m.decode([0, 1]), np.array([0, 1], dtype=np.uint8))
m.add_boundary_edge(1, fault_ids=100, weight=100)
# Test pm::SearchGraph
assert np.array_equal(np.nonzero(m.decode([0, 1]))[0], np.array([1], dtype=int))

m = Matching()
m.add_edge(0, 1, fault_ids=0, weight=-1)
m.add_edge(0, 2, fault_ids=1, weight=3)
m.add_boundary_edge(0, fault_ids=2, weight=-0.5)
m.add_edge(0, 3, fault_ids=3, weight=-3)
m.add_edge(0, 4, fault_ids=4, weight=-2)
assert np.array_equal(m.decode([1, 0, 0, 0, 0]), np.array([0, 0, 1, 0, 0], dtype=np.uint8))
m.set_boundary_nodes({1, 2, 3, 4})
assert np.array_equal(m.decode([1, 0, 0, 0, 0]), np.array([0, 0, 0, 1, 0], dtype=np.uint8))
