add steiner tree metric to C interface
kittobi1992 committed Jul 21, 2023
1 parent 9aafdba commit e561b72
Showing 11 changed files with 378 additions and 13 deletions.
66 changes: 64 additions & 2 deletions include/libmtkahypar.h
@@ -113,6 +113,12 @@ MT_KAHYPAR_API mt_kahypar_hypergraph_t mt_kahypar_read_hypergraph_from_file(cons
const mt_kahypar_preset_type_t preset,
const mt_kahypar_file_format_type_t file_format);

/**
* Reads a target graph in Metis file format. The target graph can be used in the
* 'mt_kahypar_map' function to map a (hyper)graph onto it.
*/
MT_KAHYPAR_API mt_kahypar_target_graph_t* mt_kahypar_read_target_graph_from_file(const char* file_name);

/**
* Constructs a hypergraph from a given adjacency array that specifies the hyperedges.
*
@@ -148,13 +154,32 @@ MT_KAHYPAR_API mt_kahypar_hypergraph_t mt_kahypar_create_graph(const mt_kahypar_
const mt_kahypar_hypernode_id_t* edges,
const mt_kahypar_hyperedge_weight_t* edge_weights,
const mt_kahypar_hypernode_weight_t* vertex_weights);

/**
* Constructs a target graph from a given edge list vector. The target graph can be used in the
* 'mt_kahypar_map' function to map a (hyper)graph onto it.
*
* Example:
* edges: | 0 2 | 0 1 | 2 3 | 1 3 |
* Defines a graph with four edges -> e_0 = {0,2}, e_1 = {0,1}, e_2 = {2,3}, e_3 = {1,3}
*
* \note For unweighted target graphs, you can pass nullptr as edge_weights.
* \note After construction, the arguments of this function are no longer needed and can be deleted.
*/
MT_KAHYPAR_API mt_kahypar_target_graph_t* mt_kahypar_create_target_graph(const mt_kahypar_hypernode_id_t num_vertices,
const mt_kahypar_hyperedge_id_t num_edges,
const mt_kahypar_hypernode_id_t* edges,
const mt_kahypar_hyperedge_weight_t* edge_weights);

/**
* Deletes the (hyper)graph object.
*/
MT_KAHYPAR_API void mt_kahypar_free_hypergraph(mt_kahypar_hypergraph_t hypergraph);

/**
* Deletes a target graph object.
*/
MT_KAHYPAR_API void mt_kahypar_free_target_graph(mt_kahypar_target_graph_t* target_graph);

/**
* Returns the number of nodes of the (hyper)graph.
*/
@@ -184,7 +209,7 @@ MT_KAHYPAR_API bool mt_kahypar_check_compatibility(mt_kahypar_hypergraph_t hyper
mt_kahypar_preset_type_t preset);

/**
* Partitions a (hyper)graph according to the parameters specified in the partitioning context.
* Partitions a (hyper)graph with the configuration specified in the partitioning context.
*
* \note Before partitioning, the number of blocks, imbalance parameter and objective function must be
* set in the partitioning context. This can be done either via mt_kahypar_set_context_parameter(...)
@@ -193,6 +218,25 @@ MT_KAHYPAR_API bool mt_kahypar_check_compatibility(mt_kahypar_hypergraph_t hyper
MT_KAHYPAR_API mt_kahypar_partitioned_hypergraph_t mt_kahypar_partition(mt_kahypar_hypergraph_t hypergraph,
mt_kahypar_context_t* context);

/**
* Maps a (hyper)graph onto a target graph with the configuration specified in the partitioning context.
* The number of blocks of the output mapping/partition is the same as the number of nodes in the target graph
* (each node of the target graph represents a block). The objective is to minimize the total weight of
* all Steiner trees induced by the (hyper)edges on the target graph. A Steiner tree is a tree of minimal weight
* that spans a subset of the target graph's nodes (in our case, the blocks connected by a (hyper)edge). This
* objective function accurately models wire-lengths in VLSI design or communication costs in distributed systems
* where some processors do not communicate directly with each other or communicate at different speeds.
*
* \note Since computing Steiner trees is an NP-hard problem, we currently restrict the size of the target graph
* to at most 64 nodes. If you want to map hypergraphs onto larger target graphs, you can use recursive multisectioning.
* For example, if the target graph has 4096 nodes, you can first map the hypergraph onto a coarser approximation of the
* target graph with 64 nodes, and subsequently map each block of the resulting mapping onto the corresponding
* 64-node subgraph of the target graph.
*/
MT_KAHYPAR_API mt_kahypar_partitioned_hypergraph_t mt_kahypar_map(mt_kahypar_hypergraph_t hypergraph,
mt_kahypar_target_graph_t* target_graph,
mt_kahypar_context_t* context);
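
/*
 * A condensed usage sketch (illustrative only; 'hypergraph' and 'context' are assumed to be
 * set up as for mt_kahypar_partition). The full workflow is shown in the example added in
 * lib/examples/map_hypergraph_onto_target_graph.cc:
 *
 *   mt_kahypar_target_graph_t* target_graph =
 *     mt_kahypar_read_target_graph_from_file("target.graph");
 *   mt_kahypar_partitioned_hypergraph_t mapping =
 *     mt_kahypar_map(hypergraph, target_graph, context);
 *   const mt_kahypar_hyperedge_weight_t objective =
 *     mt_kahypar_steiner_tree(mapping, target_graph);
 *   mt_kahypar_free_partitioned_hypergraph(mapping);
 *   mt_kahypar_free_target_graph(target_graph);
 */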

/**
* Checks whether or not the given partitioned hypergraph can
* be improved with the corresponding preset.
@@ -211,6 +255,18 @@ MT_KAHYPAR_API void mt_kahypar_improve_partition(mt_kahypar_partitioned_hypergra
mt_kahypar_context_t* context,
const size_t num_vcycles);

/**
* Improves a given mapping (using the V-cycle technique).
*
* \note The number of nodes of the target graph must be equal to the
* number of blocks of the given partition.
* \note There is no guarantee that this call will find an improvement.
*/
MT_KAHYPAR_API void mt_kahypar_improve_mapping(mt_kahypar_partitioned_hypergraph_t partitioned_hg,
mt_kahypar_target_graph_t* target_graph,
mt_kahypar_context_t* context,
const size_t num_vcycles);
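
/*
 * A brief sketch (illustrative only; 'partitioned_hg', 'target_graph', and 'context' as above)
 * that runs a single V-cycle on an existing mapping and measures its effect:
 *
 *   const mt_kahypar_hyperedge_weight_t before =
 *     mt_kahypar_steiner_tree(partitioned_hg, target_graph);
 *   mt_kahypar_improve_mapping(partitioned_hg, target_graph, context, 1);
 *   const mt_kahypar_hyperedge_weight_t after =
 *     mt_kahypar_steiner_tree(partitioned_hg, target_graph);
 *   // 'after' may equal 'before' - an improvement is not guaranteed (see note above).
 */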

/**
* Constructs a partitioned (hyper)graph out of the given partition.
*/
@@ -265,6 +321,12 @@ MT_KAHYPAR_API mt_kahypar_hyperedge_weight_t mt_kahypar_km1(const mt_kahypar_par
*/
MT_KAHYPAR_API mt_kahypar_hyperedge_weight_t mt_kahypar_soed(const mt_kahypar_partitioned_hypergraph_t partitioned_hg);

/**
* Computes the Steiner tree metric.
*/
MT_KAHYPAR_API mt_kahypar_hyperedge_weight_t mt_kahypar_steiner_tree(const mt_kahypar_partitioned_hypergraph_t partitioned_hg,
mt_kahypar_target_graph_t* target_graph);


/**
* Deletes the partitioned (hyper)graph object.
2 changes: 2 additions & 0 deletions include/libmtkahypartypes.h
@@ -20,6 +20,8 @@ typedef enum {

struct mt_kahypar_context_s;
typedef struct mt_kahypar_context_s mt_kahypar_context_t;
struct mt_kahypar_target_graph_s;
typedef struct mt_kahypar_target_graph_s mt_kahypar_target_graph_t;

struct mt_kahypar_hypergraph_s;
typedef struct {
67 changes: 67 additions & 0 deletions lib/examples/map_hypergraph_onto_target_graph.cc
@@ -0,0 +1,67 @@
#include <memory>
#include <vector>
#include <iostream>
#include <thread>

#include <libmtkahypar.h>

// Install library interface via 'sudo make install.mtkahypar' in build folder
// Compile with: g++ -std=c++14 -DNDEBUG -O3 map_hypergraph_onto_target_graph.cc -o example -lmtkahypar
int main(int argc, char* argv[]) {

// Initialize thread pool
mt_kahypar_initialize_thread_pool(
std::thread::hardware_concurrency() /* use all available cores */,
true /* activate interleaved NUMA allocation policy */ );

// Setup partitioning context
mt_kahypar_context_t* context = mt_kahypar_context_new();
mt_kahypar_load_preset(context, DEFAULT /* corresponds to MT-KaHyPar-D */);
// In the following, we map a hypergraph onto a target graph with 8 nodes
// with an allowed imbalance of 3%
mt_kahypar_set_partitioning_parameters(context,
8 /* number of blocks */, 0.03 /* imbalance parameter */,
KM1 /* objective function - ignored for mappings */, 42 /* seed */);
// Enable logging
mt_kahypar_set_context_parameter(context, VERBOSE, "1");

// Load Hypergraph for DEFAULT preset
mt_kahypar_hypergraph_t hypergraph =
mt_kahypar_read_hypergraph_from_file("ibm01.hgr",
DEFAULT, HMETIS /* file format */);

// Read target graph file in Metis file format
mt_kahypar_target_graph_t* target_graph =
mt_kahypar_read_target_graph_from_file("target.graph");

// Map hypergraph onto target graph
mt_kahypar_partitioned_hypergraph_t partitioned_hg =
mt_kahypar_map(hypergraph, target_graph, context);

// Extract Mapping
std::unique_ptr<mt_kahypar_partition_id_t[]> mapping =
std::make_unique<mt_kahypar_partition_id_t[]>(mt_kahypar_num_hypernodes(hypergraph));
mt_kahypar_get_partition(partitioned_hg, mapping.get());

// Extract Block Weights
std::unique_ptr<mt_kahypar_hypernode_weight_t[]> block_weights =
std::make_unique<mt_kahypar_hypernode_weight_t[]>(8);
mt_kahypar_get_block_weights(partitioned_hg, block_weights.get());

// Compute Metrics
const double imbalance = mt_kahypar_imbalance(partitioned_hg, context);
const mt_kahypar_hyperedge_weight_t steiner_tree_metric = mt_kahypar_steiner_tree(partitioned_hg, target_graph);

// Output Results
std::cout << "Partitioning Results:" << std::endl;
std::cout << "Imbalance = " << imbalance << std::endl;
std::cout << "Steiner Tree Metric = " << steiner_tree_metric << std::endl;
for ( size_t i = 0; i < 8; ++i ) {
std::cout << "Weight of Block " << i << " = " << block_weights[i] << std::endl;
}

mt_kahypar_free_context(context);
mt_kahypar_free_hypergraph(hypergraph);
mt_kahypar_free_partitioned_hypergraph(partitioned_hg);
mt_kahypar_free_target_graph(target_graph);
}
9 changes: 9 additions & 0 deletions lib/examples/target.graph
@@ -0,0 +1,9 @@
8 10 1
2 8 3 3
1 10 4 2
1 7 4 7 5 6
2 6 3 10 6 6
3 6 6 5 7 3
4 3 5 10 8 3
5 2 8 8
6 3 7 1
117 changes: 117 additions & 0 deletions lib/libmtkahypar.cpp
@@ -36,6 +36,7 @@
#include "mt-kahypar/partition/partitioner_facade.h"
#include "mt-kahypar/partition/metrics.h"
#include "mt-kahypar/partition/conversion.h"
#include "mt-kahypar/partition/mapping/target_graph.h"
#include "mt-kahypar/parallel/tbb_initializer.h"
#include "mt-kahypar/parallel/stl/scalable_vector.h"
#include "mt-kahypar/io/hypergraph_factory.h"
@@ -218,6 +219,13 @@ mt_kahypar_hypergraph_t mt_kahypar_read_hypergraph_from_file(const char* file_na
return io::readInputFile(file_name, config, instance, format, stable_construction);
}

mt_kahypar_target_graph_t* mt_kahypar_read_target_graph_from_file(const char* file_name) {
ds::StaticGraph graph = io::readInputFile<ds::StaticGraph>(file_name, FileFormat::Metis, true);
TargetGraph* target_graph = new TargetGraph(std::move(graph));
return reinterpret_cast<mt_kahypar_target_graph_t*>(target_graph);
}


mt_kahypar_hypergraph_t mt_kahypar_create_hypergraph(const mt_kahypar_preset_type_t preset,
const mt_kahypar_hypernode_id_t num_vertices,
const mt_kahypar_hyperedge_id_t num_hyperedges,
@@ -285,10 +293,33 @@ mt_kahypar_hypergraph_t mt_kahypar_create_graph(const mt_kahypar_preset_type_t p
return mt_kahypar_hypergraph_t { nullptr, NULLPTR_HYPERGRAPH };
}

mt_kahypar_target_graph_t* mt_kahypar_create_target_graph(const mt_kahypar_hypernode_id_t num_vertices,
const mt_kahypar_hyperedge_id_t num_edges,
const mt_kahypar_hypernode_id_t* edges,
const mt_kahypar_hyperedge_weight_t* edge_weights) {
// Transform the flat edge array into an edge list of vertex pairs
vec<std::pair<mt_kahypar::HypernodeID, mt_kahypar::HypernodeID>> edge_vector(num_edges);
tbb::parallel_for<mt_kahypar::HyperedgeID>(0, num_edges, [&](const mt_kahypar::HyperedgeID& he) {
edge_vector[he] = std::make_pair(edges[2*he], edges[2*he + 1]);
});

ds::StaticGraph graph = StaticGraphFactory::construct_from_graph_edges(
num_vertices, num_edges, edge_vector, edge_weights, nullptr);
TargetGraph* target_graph = new TargetGraph(std::move(graph));
return reinterpret_cast<mt_kahypar_target_graph_t*>(target_graph);
}


void mt_kahypar_free_hypergraph(mt_kahypar_hypergraph_t hypergraph) {
utils::delete_hypergraph(hypergraph);
}

void mt_kahypar_free_target_graph(mt_kahypar_target_graph_t* target_graph) {
if ( target_graph ) {
delete reinterpret_cast<TargetGraph*>(target_graph);
}
}

mt_kahypar_hypernode_id_t mt_kahypar_num_hypernodes(mt_kahypar_hypergraph_t hypergraph) {
switch ( hypergraph.type ) {
case STATIC_GRAPH: return utils::cast<ds::StaticGraph>(hypergraph).initialNumNodes();
@@ -360,6 +391,27 @@ mt_kahypar_partitioned_hypergraph_t mt_kahypar_partition(mt_kahypar_hypergraph_t
return mt_kahypar_partitioned_hypergraph_t { nullptr, NULLPTR_PARTITION };
}

mt_kahypar_partitioned_hypergraph_t mt_kahypar_map(mt_kahypar_hypergraph_t hypergraph,
mt_kahypar_target_graph_t* target_graph,
mt_kahypar_context_t* context) {
Context& c = *reinterpret_cast<Context*>(context);
if ( lib::check_if_all_relavant_parameters_are_set(c) ) {
if ( mt_kahypar_check_compatibility(hypergraph, lib::get_preset_c_type(c.partition.preset_type)) ) {
c.partition.instance_type = lib::get_instance_type(hypergraph);
c.partition.partition_type = to_partition_c_type(
c.partition.preset_type, c.partition.instance_type);
lib::prepare_context(c);
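// A mapping is always computed for the Steiner tree objective and without V-cycles;
// V-cycle refinement of an existing mapping is available via mt_kahypar_improve_mapping.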
c.partition.num_vcycles = 0;
c.partition.objective = Objective::steiner_tree;
TargetGraph* target = reinterpret_cast<TargetGraph*>(target_graph);
return PartitionerFacade::partition(hypergraph, c, target);
} else {
WARNING(lib::incompatibility_description(hypergraph));
}
}
return mt_kahypar_partitioned_hypergraph_t { nullptr, NULLPTR_PARTITION };
}

MT_KAHYPAR_API bool mt_kahypar_check_partition_compatibility(mt_kahypar_partitioned_hypergraph_t partitioned_hg,
mt_kahypar_preset_type_t preset) {
return lib::check_compatibility(partitioned_hg, preset);
Expand All @@ -384,6 +436,28 @@ void mt_kahypar_improve_partition(mt_kahypar_partitioned_hypergraph_t partitione
}
}

void mt_kahypar_improve_mapping(mt_kahypar_partitioned_hypergraph_t partitioned_hg,
mt_kahypar_target_graph_t* target_graph,
mt_kahypar_context_t* context,
const size_t num_vcycles) {
Context& c = *reinterpret_cast<Context*>(context);
if ( lib::check_if_all_relavant_parameters_are_set(c) ) {
if ( mt_kahypar_check_partition_compatibility(
partitioned_hg, lib::get_preset_c_type(c.partition.preset_type)) ) {
c.partition.instance_type = lib::get_instance_type(partitioned_hg);
c.partition.partition_type = to_partition_c_type(
c.partition.preset_type, c.partition.instance_type);
lib::prepare_context(c);
c.partition.num_vcycles = num_vcycles;
c.partition.objective = Objective::steiner_tree;
TargetGraph* target = reinterpret_cast<TargetGraph*>(target_graph);
PartitionerFacade::improve(partitioned_hg, c, target);
} else {
WARNING(lib::incompatibility_description(partitioned_hg));
}
}
}

mt_kahypar_partitioned_hypergraph_t mt_kahypar_create_partitioned_hypergraph(mt_kahypar_hypergraph_t hypergraph,
const mt_kahypar_preset_type_t preset,
const mt_kahypar_partition_id_t num_blocks,
@@ -553,4 +627,47 @@ mt_kahypar_hyperedge_weight_t mt_kahypar_soed(const mt_kahypar_partitioned_hyper
case NULLPTR_PARTITION: return 0;
}
return 0;
}

mt_kahypar_hyperedge_weight_t mt_kahypar_steiner_tree(const mt_kahypar_partitioned_hypergraph_t partitioned_hg,
mt_kahypar_target_graph_t* target_graph) {
TargetGraph* target = reinterpret_cast<TargetGraph*>(target_graph);
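// Lazily precompute distances on the target graph before evaluating the metric
// (assumption: the argument bounds the block-set size for which Steiner trees are
// precomputed exactly; larger sets are handled at query time).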
if ( !target->isInitialized() ) {
target->precomputeDistances(4);
}

switch ( partitioned_hg.type ) {
case MULTILEVEL_GRAPH_PARTITIONING:
{
StaticPartitionedGraph& phg = utils::cast<StaticPartitionedGraph>(partitioned_hg);
phg.setTargetGraph(target);
return metrics::quality(phg, Objective::steiner_tree);
}
case N_LEVEL_GRAPH_PARTITIONING:
{
DynamicPartitionedGraph& phg = utils::cast<DynamicPartitionedGraph>(partitioned_hg);
phg.setTargetGraph(target);
return metrics::quality(phg, Objective::steiner_tree);
}
case MULTILEVEL_HYPERGRAPH_PARTITIONING:
{
StaticPartitionedHypergraph& phg = utils::cast<StaticPartitionedHypergraph>(partitioned_hg);
phg.setTargetGraph(target);
return metrics::quality(phg, Objective::steiner_tree);
}
case N_LEVEL_HYPERGRAPH_PARTITIONING:
{
DynamicPartitionedHypergraph& phg = utils::cast<DynamicPartitionedHypergraph>(partitioned_hg);
phg.setTargetGraph(target);
return metrics::quality(phg, Objective::steiner_tree);
}
case LARGE_K_PARTITIONING:
{
SparsePartitionedHypergraph& phg = utils::cast<SparsePartitionedHypergraph>(partitioned_hg);
phg.setTargetGraph(target);
return metrics::quality(phg, Objective::steiner_tree);
}
case NULLPTR_PARTITION: return 0;
}
return 0;
}
3 changes: 1 addition & 2 deletions mt-kahypar/io/sql_plottools_serializer.cpp
@@ -149,8 +149,7 @@ std::string serialize(const PartitionedHypergraph& hypergraph,
<< " shuffle_block_size=" << context.shared_memory.shuffle_block_size
<< " static_balancing_work_packages=" << context.shared_memory.static_balancing_work_packages;

if ( context.mapping.target_graph_file != "" &&
context.partition.objective == Objective::steiner_tree ) {
if ( context.partition.objective == Objective::steiner_tree ) {
oss << " target_graph_file=" << context.mapping.target_graph_file.substr(
context.mapping.target_graph_file.find_last_of('/') + 1)
<< " mapping_strategy=" << context.mapping.strategy
3 changes: 1 addition & 2 deletions mt-kahypar/partition/context.cpp
@@ -913,8 +913,7 @@ namespace mt_kahypar {
<< "-------------------------------------------------------------------------------\n"
<< context.refinement
<< "-------------------------------------------------------------------------------\n";
if ( context.mapping.target_graph_file != "" &&
context.partition.objective == Objective::steiner_tree ) {
if ( context.partition.objective == Objective::steiner_tree ) {
str << context.mapping
<< "-------------------------------------------------------------------------------\n";
}