Skip to content

Commit

Permalink
Remove numpy dependency. Add c++ interface instructions.
Browse files Browse the repository at this point in the history
  • Loading branch information
DillonZChen committed Jun 16, 2024
1 parent aa6424d commit b6b091e
Show file tree
Hide file tree
Showing 22 changed files with 185 additions and 133 deletions.
65 changes: 65 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
# This CMakeLists.txt file is only for use with C++ projects and not used by setup.py
# See also https://cmake.org/cmake/help/latest/guide/importing-exporting/index.html
cmake_minimum_required(VERSION 3.20)
project(wlplan)

# make cache variables for install destinations
include(GNUInstallDirs)

set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -Wall -Wextra -pedantic -fPIC -O3 -DNDEBUG -fomit-frame-pointer")

# WLPLAN_VERSION is normally supplied by cmake_build.py via -DWLPLAN_VERSION=<ver>.
# Without a fallback, write_basic_package_version_file() below errors out on an
# empty VERSION when someone configures this project manually, so default to a
# placeholder version in that case.
if(NOT DEFINED WLPLAN_VERSION)
  set(WLPLAN_VERSION 0.0.0)
endif()

# Gather source files
# Do NOT glob src/main.cpp because that is only for creating Python bindings and requires pybind11
# NOTE(review): with GLOB_RECURSE the "src/**/*.cpp" pattern matches sources in
# subdirectories of src/ only, which is what keeps src/main.cpp out — verify this
# still holds if sources are ever added directly under src/.
file(GLOB_RECURSE SRC_FILES "src/**/*.cpp")

# Define the library target
add_library(wlplan ${SRC_FILES})

# Specify include directories for the target
# (BUILD_INTERFACE is used when building from this tree, INSTALL_INTERFACE after install)
target_include_directories(wlplan PUBLIC
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
    $<INSTALL_INTERFACE:include>
)

# Export the targets to a file
install(TARGETS wlplan
    EXPORT wlplanTargets
    LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
    ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
    RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
    INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
)

# Create and install the package configuration file
include(CMakePackageConfigHelpers)

# Generate the version file for the config file; ExactVersion means consumers
# requesting any other version via find_package(wlplan <ver>) will be rejected.
write_basic_package_version_file(
    "${CMAKE_CURRENT_BINARY_DIR}/wlplanConfigVersion.cmake"
    VERSION "${WLPLAN_VERSION}"
    COMPATIBILITY ExactVersion
)

# Create config file
configure_package_config_file(
    "${CMAKE_CURRENT_SOURCE_DIR}/Config.cmake.in"
    "${CMAKE_CURRENT_BINARY_DIR}/wlplanConfig.cmake"
    INSTALL_DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/wlplan"
)

# Install the config files
install(
    FILES
    "${CMAKE_CURRENT_BINARY_DIR}/wlplanConfig.cmake"
    "${CMAKE_CURRENT_BINARY_DIR}/wlplanConfigVersion.cmake"
    DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/wlplan"
)

# Export and install the targets
install(
    EXPORT wlplanTargets
    FILE wlplanTargets.cmake
    DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/wlplan"
)
17 changes: 14 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,18 +1,29 @@
WLPlan
======

[![PyPI version](https://badge.fury.io/py/wlplan.svg)](https://badge.fury.io/py/wlplan)
[![License](https://img.shields.io/pypi/l/wlplan)](https://badge.fury.io/py/wlplan)
[![PyPI version](https://badge.fury.io/py/wlplan.svg)](https://pypi.org/project/wlplan/)
[![License](https://img.shields.io/pypi/l/wlplan)](LICENSE)

WLPlan is a package for generating embeddings of PDDL planning problems for machine learning tasks.

## Installation
The Python interface for WLPlan can be installed simply by
### Python Interface
The Python interface can be installed simply with

pip install wlplan

The PyPI release only supports `python>=3.10`.

### C++ Interface
The C++ interface can be installed in your project by running

./cmake_build.py <path/to/installation>

and adding the following to the root CMakeLists.txt file of your project:

list(APPEND CMAKE_PREFIX_PATH "<path/to/installation>")
find_package(wlplan)

## References
For information about the technical details of the underlying algorithm, read the paper [here](https://arxiv.org/abs/2403.16508). The corresponding bib entry is

Expand Down
20 changes: 20 additions & 0 deletions cmake_build.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#!/usr/bin/env python
"""Build the wlplan C++ library with CMake and install it to a given prefix."""

import argparse
import subprocess

# Load __version__ from the package source without importing the package
# (importing wlplan would require the compiled extension to be built already).
# A context manager is used so the file handle is closed deterministically.
with open("wlplan/__version__.py") as f:
    exec(f.read())


def main():
    """Configure, build, and install the wlplan C++ library via CMake.

    Expects a single positional argument: the installation prefix passed to
    ``cmake --install``. Raises subprocess.CalledProcessError if any CMake
    step fails (check_call propagates non-zero exit codes).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "installation_path", type=str, help="install prefix passed to `cmake --install`"
    )
    # Previously hard-coded as -j32; keep 32 as the default for compatibility
    # but let callers tune build parallelism.
    parser.add_argument(
        "-j", "--jobs", type=int, default=32, help="number of parallel build jobs (default: 32)"
    )
    args = parser.parse_args()

    # __version__ is defined at module level by exec-ing wlplan/__version__.py.
    subprocess.check_call(["cmake", "-S", ".", "-B", "build", f"-DWLPLAN_VERSION={__version__}"])
    subprocess.check_call(["cmake", "--build", "build", f"-j{args.jobs}"])
    subprocess.check_call(["cmake", "--install", "build", "--prefix", args.installation_path])


if __name__ == "__main__":
    main()
34 changes: 8 additions & 26 deletions docs/examples/blocksworld.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,9 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Requirement already satisfied: numpy in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (1.26.4)\n",
"Requirement already satisfied: scikit-learn in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (1.5.0)\n",
"Requirement already satisfied: wlplan in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (0.2.4)\n",
"Requirement already satisfied: scipy>=1.6.0 in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (from scikit-learn) (1.13.1)\n",
"Requirement already satisfied: joblib>=1.2.0 in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (from scikit-learn) (1.4.2)\n",
"Requirement already satisfied: threadpoolctl>=3.1.0 in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (from scikit-learn) (3.5.0)\n",
"Requirement already satisfied: networkx>=3.0 in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (from wlplan) (3.3)\n",
"Note: you may need to restart the kernel to use updated packages.\n",
"Looking in indexes: https://test.pypi.org/simple/\n",
"Requirement already satisfied: pymimir-dzc-fork==0.1.3 in /home/dzc/anaconda3/envs/wlplan/lib/python3.10/site-packages (0.1.3)\n",
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"outputs": [],
"source": [
"%pip install numpy scikit-learn wlplan\n",
"%pip install -i https://test.pypi.org/simple/ pymimir-dzc-fork==0.1.3"
Expand All @@ -37,7 +19,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -61,7 +43,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -85,7 +67,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
Expand Down Expand Up @@ -180,7 +162,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 4,
"metadata": {},
"outputs": [
{
Expand All @@ -201,7 +183,7 @@
" multiset_hash=False,\n",
")\n",
"feature_generator.collect(dataset)\n",
"X = feature_generator.embed(dataset).astype(float)\n",
"X = np.array(feature_generator.embed(dataset)).astype(float)\n",
"y = np.array(y)\n",
"print(f\"{X.shape=}\")\n",
"print(f\"{y.shape=}\")"
Expand All @@ -216,7 +198,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 5,
"metadata": {},
"outputs": [
{
Expand Down
8 changes: 1 addition & 7 deletions docs/index.rst
Original file line number Diff line number Diff line change
@@ -1,13 +1,7 @@
WLPlan Documentation
====================

The Python interface for WLPlan can be installed simply by

.. code-block::
pip install wlplan
The source code of WLPlan is available on `GitHub <https://github.com/DillonZChen/wlplan>`__.
The source code and installation instructions for WLPlan are available on `GitHub <https://github.com/DillonZChen/wlplan>`__.

.. toctree::
:caption: Package Reference
Expand Down
10 changes: 2 additions & 8 deletions include/feature_generation/wl_features.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@
#include "neighbour_container.hpp"

#include <memory>
#include <pybind11/numpy.h>
#include <pybind11/pybind11.h>
#include <string>
#include <vector>

Expand Down Expand Up @@ -67,12 +65,8 @@ namespace feature_generation {
// set problem for graph generator if it exists
void set_problem(const planning::Problem &problem);

// assumes training is done, and returns a numpy feature matrix X
py::array_t<int> embed_np(const data::Dataset dataset);

py::array_t<int> embed_np(const std::vector<graph::Graph> &graphs);

py::array_t<int> embed_np(const planning::State &state);
// assumes training is done, and returns a feature matrix X
std::vector<Embedding> embed(const data::Dataset &dataset);

std::vector<Embedding> embed(const std::vector<graph::Graph> &graphs);

Expand Down
3 changes: 0 additions & 3 deletions include/planning/domain.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,10 @@
#include "predicate.hpp"
#include "state.hpp"

#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <string>
#include <unordered_map>
#include <vector>

namespace py = pybind11;
using json = nlohmann::json;

namespace planning {
Expand Down
26 changes: 0 additions & 26 deletions include/utils/arrays.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
#define UTILS_HPP

#include <iostream>
#include <pybind11/numpy.h>
#include <pybind11/pybind11.h>
#include <vector>

namespace py = pybind11;
Expand Down Expand Up @@ -35,30 +33,6 @@ namespace utils {
std::cout << "]";
std::cout << std::endl;
}

// https://stackoverflow.com/a/61937194
template <typename T> py::array_t<T> to_np_1d(const std::vector<T> &X) {
int N = (int)X.size();
py::array_t<int, py::array::c_style> X_np({N});
auto ra = X_np.mutable_unchecked();
for (int i = 0; i < N; i++) {
ra(i) = X[i];
};
return X_np;
}

template <typename T> py::array_t<T> to_np_2d(const std::vector<std::vector<T>> &X) {
size_t N = X.size();
size_t M = X[0].size();
py::array_t<int, py::array::c_style> X_np({N, M});
auto ra = X_np.mutable_unchecked();
for (size_t i = 0; i < N; i++) {
for (size_t j = 0; j < M; j++) {
ra(i, j) = X[i][j];
};
};
return X_np;
}
} // namespace utils

#endif // UTILS_HPP
3 changes: 1 addition & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
"_wlplan",
sorted([f for file_group in files for f in file_group]),
# Example: passing in the version to the compiled code
define_macros=[("VERSION_INFO", __version__)],
define_macros=[("WLPLAN_VERSION", __version__)],
),
]

Expand All @@ -37,6 +37,5 @@
python_requires=">=3.10",
install_requires=[
"networkx>=3.0",
"numpy>=1.26",
],
)
23 changes: 4 additions & 19 deletions src/feature_generation/wl_features.cpp
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
#include "../../include/feature_generation/wl_features.hpp"

#include "../../include/graph/graph_generator_factory.hpp"
#include "../../include/utils/arrays.hpp"
#include "../../include/utils/nlohmann/json.hpp"

#include <fstream>
Expand All @@ -17,7 +16,7 @@ namespace feature_generation {
int iterations,
std::string prune_features,
bool multiset_hash)
: package_version(MACRO_STRINGIFY(VERSION_INFO)),
: package_version(MACRO_STRINGIFY(WLPLAN_VERSION)),
graph_representation(graph_representation),
iterations(iterations),
prune_features(prune_features),
Expand All @@ -37,7 +36,7 @@ namespace feature_generation {
std::ifstream i(filename);
json j;
i >> j;
std::string current_package_version = MACRO_STRINGIFY(VERSION_INFO);
std::string current_package_version = MACRO_STRINGIFY(WLPLAN_VERSION);

// load configurations
package_version = j["package_version"];
Expand Down Expand Up @@ -285,26 +284,12 @@ namespace feature_generation {
}
}

py::array_t<int> WLFeatures::embed_np(const data::Dataset dataset) {
std::vector<Embedding> WLFeatures::embed(const data::Dataset &dataset) {
std::vector<graph::Graph> graphs = convert_to_graphs(dataset);
if (graphs.size() == 0) {
throw std::runtime_error("No graphs to embed");
}
return embed_np(graphs);
}

py::array_t<int> WLFeatures::embed_np(const std::vector<graph::Graph> &graphs) {
if (graphs.size() == 0) {
throw std::runtime_error("No graphs to embed");
}
std::vector<Embedding> X = embed(graphs);
return utils::to_np_2d(X);
}

py::array_t<int> WLFeatures::embed_np(const planning::State &state) {
graph::Graph graph = *(graph_generator->to_graph(state));
Embedding x = embed(graph);
return utils::to_np_1d(x);
return embed(graphs);
}

std::vector<Embedding> WLFeatures::embed(const std::vector<graph::Graph> &graphs) {
Expand Down
10 changes: 5 additions & 5 deletions src/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -199,11 +199,11 @@ wl_features
"graphs"_a)
.def("set_problem", &feature_generation::WLFeatures::set_problem,
"problem"_a)
.def("embed", py::overload_cast<const data::Dataset>(&feature_generation::WLFeatures::embed_np),
.def("embed", py::overload_cast<const data::Dataset &>(&feature_generation::WLFeatures::embed),
"dataset"_a)
.def("embed", py::overload_cast<const std::vector<graph::Graph> &>(&feature_generation::WLFeatures::embed_np),
.def("embed", py::overload_cast<const std::vector<graph::Graph> &>(&feature_generation::WLFeatures::embed),
"graphs"_a)
.def("embed", py::overload_cast<const planning::State &>(&feature_generation::WLFeatures::embed_np),
.def("embed", py::overload_cast<const planning::State &>(&feature_generation::WLFeatures::embed),
"state"_a)
.def("get_n_features", &feature_generation::WLFeatures::get_n_features)
.def("get_seen_counts", &feature_generation::WLFeatures::get_seen_counts)
Expand All @@ -218,8 +218,8 @@ wl_features
.def("save", &feature_generation::WLFeatures::save);

/* Version */
#ifdef VERSION_INFO
m.attr("__version__") = MACRO_STRINGIFY(VERSION_INFO);
#ifdef WLPLAN_VERSION
m.attr("__version__") = MACRO_STRINGIFY(WLPLAN_VERSION);
#else
m.attr("__version__") = "dev";
#endif
Expand Down
Loading

0 comments on commit b6b091e

Please sign in to comment.