From 549ea71242e44fa6df2ab99cb6fcdb9ea3f4ef8c Mon Sep 17 00:00:00 2001 From: Wey Gu Date: Fri, 17 Mar 2023 05:35:06 +0000 Subject: [PATCH] feat: ngdi_api initial version poc passed, now implemented in lib --- .gitignore | 3 + README.md | 84 ++++++++----- docs/Environment_Setup.md | 44 +++++-- docs/ngdi_API_Gateway.md | 89 ++++++++++++- examples/ngdi_from_ngql_udf.ipynb | 134 +++----------------- examples/run_ngdi_api.py | 3 + ngdi/__init__.py | 2 + ngdi/ngdi_api/__init__.py | 0 ngdi/ngdi_api/__main__.py | 8 ++ ngdi/ngdi_api/app.py | 124 +++++++++++++++++++ pdm.lock | 199 +++++++++++++++++++++++++++++- pyproject.toml | 6 +- 12 files changed, 538 insertions(+), 158 deletions(-) create mode 100644 examples/run_ngdi_api.py create mode 100644 ngdi/ngdi_api/__init__.py create mode 100644 ngdi/ngdi_api/__main__.py create mode 100644 ngdi/ngdi_api/app.py diff --git a/.gitignore b/.gitignore index 171bbbf..31f289d 100644 --- a/.gitignore +++ b/.gitignore @@ -131,3 +131,6 @@ dmypy.json # pdm .pdm.toml + +# requirments.txt is only for local development +requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index 687a85e..8878468 100644 --- a/README.md +++ b/README.md @@ -119,35 +119,61 @@ Basically the same as Spark Engine, but with `engine="nebula"`. ngdi is an unified abstraction layer for different engines, the current implementation is based on Spark, NetworkX, DGL and NebulaGraph, but it's easy to extend to other engines like Flink, GraphScope, PyG etc. ``` - ┌───────────────────────────────────────────────────┐ - │ Spark Cluster │ - │ .─────. .─────. .─────. .─────. │ - ┌─▶│ : ; : ; : ; : ; │ - │ │ `───' `───' `───' `───' │ -Algorithm │ - Spark └───────────────────────────────────────────────────┘ - Engine ┌────────────────────────────────────────────────────────────────┐ - └──┤ │ - │ NebulaGraph Data Intelligence Suite(ngdi) │ - │ ┌────────┐ ┌──────┐ ┌────────┐ ┌─────┐ │ - │ │ Reader │ │ Algo │ │ Writer │ │ GNN │ │ - │ └────────┘ └──────┘ └────────┘ └─────┘ │ - │ ├────────────┴───┬────────┴─────┐ └──────┐ │ - │ ▼ ▼ ▼ ▼ │ - │ ┌─────────────┐ ┌──────────────┐ ┌──────────┐┌───────────┐ │ - ┌──┤ │ SparkEngine │ │ NebulaEngine │ │ NetworkX ││ DGLEngine │ │ - │ │ └─────────────┘ └──────────────┘ └──────────┘└───────────┘ │ - │ └──────────┬─────────────────────────────────────────────────────┘ - │ │ Spark - │ └────────Reader ────────────┐ -Spark Reader Query Mode │ -Scan Mode ▼ - │ ┌───────────────────────────────────────────────────┐ - │ │ NebulaGraph Graph Engine Nebula-GraphD │ - │ ├──────────────────────────────┬────────────────────┤ - │ │ NebulaGraph Storage Engine │ │ - └─▶│ Nebula-StorageD │ Nebula-Metad │ - └──────────────────────────────┴────────────────────┘ + ┌───────────────────────────────────────────────────┐ + │ Spark Cluster │ + │ .─────. .─────. .─────. .─────. 
│ + │ ; : ; : ; : ; : │ + ┌─▶│ : ; : ; : ; : ; │ + │ │ ╲ ╱ ╲ ╱ ╲ ╱ ╲ ╱ │ + │ │ `───' `───' `───' `───' │ + Algo Spark │ + Engine└───────────────────────────────────────────────────┘ + │ ┌────────────────────────────────────────────────────┬──────────┐ + └──┤ │ │ + │ NebulaGraph Data Intelligence Suite(ngdi) │ ngdi-api │◀─┐ + │ │ │ │ + │ └──────────┤ │ + │ ┌────────┐ ┌──────┐ ┌────────┐ ┌─────┐ │ │ + │ │ Reader │ │ Algo │ │ Writer │ │ GNN │ │ │ + ┌───────▶│ └────────┘ └──────┘ └────────┘ └─────┘ │ │ + │ │ │ │ │ │ │ │ + │ │ ├────────────┴───┬────────┴─────┐ └──────┐ │ │ + │ │ ▼ ▼ ▼ ▼ │ │ + │ │ ┌─────────────┐ ┌──────────────┐ ┌──────────┐┌──────────┐ │ │ + │ ┌──┤ │ SparkEngine │ │ NebulaEngine │ │ NetworkX ││ DGLEngine│ │ │ + │ │ │ └─────────────┘ └──────────────┘ └──────────┘└──────────┘ │ │ + │ │ └──────────┬────────────────────────────────────────────────────┘ │ + │ │ │ Spark │ + │ │ └────────Reader ────────────┐ │ + │ Spark Query Mode │ │ + │ Reader │ │ + │Scan Mode ▼ ┌─────────┐ + │ │ ┌───────────────────────────────────────────────────┬─────────┤ ngdi-udf│◀─────────────┐ + │ │ │ │ └─────────┤ │ + │ │ │ NebulaGraph Graph Engine Nebula-GraphD │ ngdi-GraphD │ │ + │ │ ├──────────────────────────────┬────────────────────┼───────────────────┘ │ + │ │ │ │ │ │ + │ │ │ NebulaGraph Storage Engine │ │ │ + │ │ │ │ │ │ + │ └─▶│ Nebula-StorageD │ Nebula-Metad │ │ + │ │ │ │ │ + │ └──────────────────────────────┴────────────────────┘ │ + │ │ + │ ┌───────────────────────────────────────────────────────────────────────────────────────┐ │ + │ │ RETURN ngdi("pagerank", ["follow"], ["degree"], "spark", {space: "basketballplayer"}) │──┘ + │ └───────────────────────────────────────────────────────────────────────────────────────┘ + │ ┌─────────────────────────────────────────────────────────────┐ + │ │ from ngdi import NebulaReader │ + │ │ │ + │ │ # read data with spark engine, scan mode │ + │ │ reader = NebulaReader(engine="spark") │ + │ │ reader.scan(edge="follow", props="degree") │ + └──│ df = reader.read() │ + │ │ + │ # run pagerank algorithm │ + │ pr_result = df.algo.pagerank(reset_prob=0.15, max_iter=10) │ + │ │ + └─────────────────────────────────────────────────────────────┘ ``` ### Spark Engine Prerequisites diff --git a/docs/Environment_Setup.md b/docs/Environment_Setup.md index e055cc5..fe7214f 100644 --- a/docs/Environment_Setup.md +++ b/docs/Environment_Setup.md @@ -41,7 +41,7 @@ Just visit [http://localhost:7001](http://localhost:7001) in your browser, with: - user: `root` - password: `nebula` -## Rin In Production +## Run In Production ### Run on PySpark Jupyter Notebook @@ -98,15 +98,45 @@ pr_result = df.algo.pagerank(reset_prob=0.15, max_iter=10) Then we can submit the job to Spark cluster: ```bash -spark-submit --master spark://master:7077 \ - --driver-class-path /opt/nebulagraph/ngdi/package/nebula-spark-connector.jar \ - --driver-class-path /opt/nebulagraph/ngdi/package/nebula-algo.jar \ - --jars /opt/nebulagraph/ngdi/package/nebula-spark-connector.jar \ - --jars /opt/nebulagraph/ngdi/package/nebula-algo.jar \ - --py-files /opt/nebulagraph/ngdi/package/ngdi-py3-env.zip \ +spark-submit --master spark://sparkmaster:7077 \ + --driver-class-path /nebula-spark-connector.jar \ + --driver-class-path /nebula-algo.jar \ + --jars /nebula-spark-connector.jar \ + --jars /nebula-algo.jar \ + --py-files /ngdi-py3-env.zip \ pagerank.py ``` +## Prepare for py-files + +```bash +pip install pdm +# prepare dep list in ngdi codebase +pdm export -o dist/requirements.txt --without-hashes +# build a wheel for 
ngdi
+pdm build
+# install the dependencies and the ngdi wheel into dist/dependencies
+pip install -r dist/requirements.txt --target dist/dependencies
+pip install . --target dist/dependencies
+# zip the dependencies (including ngdi) into ngdi-py3-env.zip
+cd dist
+zip -r ngdi-py3-env.zip dependencies
+# copy ngdi-py3-env.zip to hdfs
+hdfs dfs -put ngdi-py3-env.zip /
+# check it's there
+hdfs dfs -ls /
+```
+
+Now we have all files ready:
+
+```bash
+# hdfs dfs -ls /
+Found 4 items
+-rw-r--r-- 3 root supergroup 167042166 2023-03-17 03:54 /nebula-algo.jar
+-rw-r--r-- 3 root supergroup 165992037 2023-03-17 03:54 /nebula-spark-connector.jar
+-rw-r--r-- 3 root supergroup 5068513 2023-03-17 03:52 /ngdi-py3-env.zip
+```
+
 ### Run ngdi algorithm PySpark job from python script
 
 We have everything ready as above, including the `pagerank.py`.
diff --git a/docs/ngdi_API_Gateway.md b/docs/ngdi_API_Gateway.md
index 9923c09..2c7c1cd 100644
--- a/docs/ngdi_API_Gateway.md
+++ b/docs/ngdi_API_Gateway.md
@@ -1,14 +1,99 @@
+# ngdi API Gateway
+
+The ngdi API Gateway is a RESTful API server that provides a unified interface for ngdi algorithms.
+
+With the ngdi API Gateway and the ngdi UDF, we can call ngdi algorithms from ngql.
+
 ## Calling from ngql
 
 ```cypher
-RETURN ngdi("pagerank", ["follow"], ["degree"], "compact")
+RETURN ngdi("pagerank", ["follow"], ["degree"], "spark", {space: "basketballplayer", max_iter: 10}, {write_mode: "insert"})
 ```
 
 ## Setup ngdi API Gateway
 
-See: [../examples/ngdi_from_ngql_udf.ipynb](https://github.com/wey-gu/nebulagraph-di/blob/main/examples/ngdi_from_ngql_udf.ipynb)
+For the Spark engine, we can run it from the Spark Jupyter Notebook, see: [../examples/ngdi_from_ngql_udf.ipynb](https://github.com/wey-gu/nebulagraph-di/blob/main/examples/ngdi_from_ngql_udf.ipynb)
+
+For the NetworkX engine, we can run it in the same way as in the Jupyter Notebook, see: [../examples/run_ngdi_api.py](https://github.com/wey-gu/nebulagraph-di/blob/main/examples/run_ngdi_api.py)
+
+Or you can run it with `pdm`:
+
+```bash
+export NGDI_PORT=9999
+pdm run ngdi-api
+```
 
 ## UDF build
 
 See https://github.com/wey-gu/nebula/tree/ngdi_udf
+
+- Build the binary `ngdi.so` file
+
+```bash
+export TAG=ubuntu2004
+docker run -ti \
+ --network nebula-net \
+ --security-opt seccomp=unconfined \
+ -v "$PWD":/home/nebula \
+ -w /home/nebula \
+ --name nebula_dev \
+ vesoft/nebula-dev:$TAG \
+ bash
+
+mkdir build && cd build
+cmake -DCMAKE_CXX_COMPILER=$TOOLSET_CLANG_DIR/bin/g++ -DCMAKE_C_COMPILER=$TOOLSET_CLANG_DIR/bin/gcc -DENABLE_WERROR=OFF -DCMAKE_BUILD_TYPE=Release -DENABLE_TESTING=OFF ..
+
+cd ../udf
+make UDF=ngdi
+```
+
+## Setup ngdi-graphd
+
+The ngdi-graphd is just a graphd with the ngdi UDF installed.
+
+We just need to put the `ngdi.so` file into a path accessible to graphd, like `/udf/`, then point `--udf_path` to this path and set `--enable_udf=true`.
+
+- Note that the `ngdi.so` file should be built in the same environment as the graphd.
+- The `ngdi.so` file should be granted the `x` permission (`chmod +x ngdi.so`).
+- The ngdi-api URL should be set in the `ngdi_gateway_url_prefix` environment variable, e.g. `export ngdi_gateway_url_prefix="http://jupyter:9999"`, as sketched below.
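+
+For example, preparing the UDF on the graphd host could look like this (a sketch, assuming the compose setup below, where the host directory `./udf` is mounted to `/udf/` in the ngdi-graphd container):
+
+```bash
+# place the UDF where graphd's --udf_path points to (mounted as /udf/ below)
+mkdir -p udf
+cp ngdi.so udf/
+# grant the executable bit
+chmod +x udf/ngdi.so
+# the gateway address is passed to graphd via the ngdi_gateway_url_prefix
+# variable in the container environment, see the docker compose example below
+```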
+ +Example docker compose: + +```yaml + graphd: + image: weygu/ngdi-graphd:2023.03.13 + container_name: ngdi_graphd + environment: + USER: root + TZ: "${TZ:-Asia/Shanghai}" + ngdi_gateway_url_prefix: "http://jupyter:9999" + command: + - --meta_server_addrs=metad0:9559,metad1:9559,metad2:9559 + - --port=9669 + - --local_ip=ngdi_graphd + - --ws_ip=ngdi_graphd + - --ws_http_port=19669 + - --log_dir=/logs + - --v=5 + - --enable_udf=true + - --udf_path=/udf/ + healthcheck: + test: ["CMD", "curl", "-sf", "http://ngdi_graphd:19669/status"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 20s + ports: + - "29669:9669" + - 19669 + - 19670 + volumes: + - ./logs/graph:/logs + - ./udf:/udf + networks: + - nebula-net + restart: on-failure + cap_add: + - SYS_PTRACE +``` \ No newline at end of file diff --git a/examples/ngdi_from_ngql_udf.ipynb b/examples/ngdi_from_ngql_udf.ipynb index 9275936..acaadd8 100644 --- a/examples/ngdi_from_ngql_udf.ipynb +++ b/examples/ngdi_from_ngql_udf.ipynb @@ -6,11 +6,11 @@ "id": "ee4221c8", "metadata": {}, "source": [ - "### Run ngdi API gateway to handle call with HTTP\n", + "### Call ngdi API\n", "\n", "It's either a cURL call or a UDF query from ngdi-graphd.\n", "\n", - "- Query from ngdi-graphd\n", + "#### Query from ngdi-graphd\n", "\n", "```cypher\n", "-- Prepare the write schema\n", @@ -29,7 +29,7 @@ "- `mode`: the mode(engine) to be used in the algorithm, e.g. `spark`, `networkx`\n", "- `algo_context`: the context to be used in the algorithm, e.g. `{space: \"basketballplayer\", max_iter: 10}`\n", "\n", - "- Call with cURL\n", + "#### Call with cURL\n", "\n", "```bash\n", "curl -X POST \\\n", @@ -64,122 +64,12 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "id": "a94e7d58", + "attachments": {}, + "cell_type": "markdown", + "id": "69662a94", "metadata": {}, - "outputs": [], "source": [ - "import os\n", - "\n", - "from ngdi import NebulaReader, NebulaWriter\n", - "from ngdi.config import NebulaGraphConfig\n", - "\n", - "from flask import Flask, request\n", - "app = Flask(__name__)\n", - "\n", - "\n", - "def get_nebulagraph_config(space=\"basketballplayer\"):\n", - " # get credentials from env\n", - " graphd_hosts = os.getenv(\"GRAPHD_HOSTS\", \"graphd:9669\")\n", - " metad_hosts = os.getenv(\"METAD_HOSTS\", \"metad0:9559,metad1:9559,metad2:9559\")\n", - " user = os.getenv(\"USER\", \"root\")\n", - " password = os.getenv(\"PASSWORD\", \"nebula\")\n", - "\n", - " return NebulaGraphConfig(\n", - " graphd_hosts = graphd_hosts,\n", - " metad_hosts = metad_hosts,\n", - " user = user,\n", - " password = password,\n", - " space = space\n", - " )\n", - "\n", - "@app.route('/api/v0/spark/', methods=['GET'])\n", - "def test(algo_name):\n", - " return {\"status\": \"OK\"}\n", - "\n", - "@app.route('/api/v0/spark/', methods=['POST'])\n", - "def parallel(algo_name):\n", - " data = request.get_json()\n", - "\n", - " try:\n", - " # get algo_context\n", - " algo_context = data.get(\"algo_context\")\n", - " assert algo_context is not None, \"algo_context should not be None\"\n", - " assert algo_context.get(\"space\") is not None, \"space should not be None\"\n", - " except Exception as e:\n", - " print(e)\n", - " return {\"error\": f\"algo context parsing failed: {e}\"}\n", - " space = algo_context.get(\"space\")\n", - " nebula_config = get_nebulagraph_config(space=space)\n", - "\n", - " reader = NebulaReader(engine=\"spark\")\n", - " # get read_context\n", - " try:\n", - " read_context = data.get(\"read_context\")\n", - " read_mode = 
read_context.get(\"read_mode\")\n", - " edges = read_context.get(\"edge_types\")\n", - " edge_weights = read_context.get(\"edge_weights\")\n", - " \n", - " assert len(edges) == len(edge_weights) and len(edges) > 0, \"edges and edge_weights should have the same length and length > 0\"\n", - " # TBD, it seems that the reader.scan() need to support more than one edge type\n", - " # https://github.com/wey-gu/nebulagraph-di/issues/19\n", - " # need to query all and union them.\n", - " if read_mode == \"scan\":\n", - " reader.scan(edge=edges[0], props=edge_weights[0])\n", - " elif read_mode == \"query\":\n", - " query = read_context.get(\"query\")\n", - " assert query is not None, \"query should not be None\"\n", - " reader.query(query, edge=edges[0], props=edge_weights[0])\n", - " # TODO(wey): need to revisit the query and scan API, to align them.\n", - " # ref: https://github.com/vesoft-inc/nebula-algorithm/blob/master/nebula-algorithm/src/main/scala/com/vesoft/nebula/algorithm/Main.scala\n", - " # ref: https://github.com/vesoft-inc/nebula-algorithm/blob/master/nebula-algorithm/src/main/scala/com/vesoft/nebula/algorithm/reader/DataReader.scala\n", - " # ref: https://github.com/vesoft-inc/nebula-spark-connector/blob/master/example/src/main/scala/com/vesoft/nebula/examples/connector/NebulaSparkReaderExample.scala\n", - " df = reader.read()\n", - " except Exception as e:\n", - " # TBD, need to return error code, return empty json for now\n", - " print(e)\n", - " return {\"error\": f\"read failed: {e}\"}\n", - " try:\n", - " # ensure the algo_name is supported\n", - " assert algo_name in df.algo.get_all_algo(), f\"{algo_name} is not supported\"\n", - "\n", - " algo_config = dict(algo_context)\n", - " algo_config.pop(\"space\")\n", - " algo_config.pop(\"name\")\n", - " # call df.algo.algo_name(**algo_config)\n", - " algo_result = getattr(df.algo, algo_name)(**algo_config)\n", - " except Exception as e:\n", - " # TBD, need to return error code, return empty json for now\n", - " print(e)\n", - " return {\"error\": f\"algo execution failed: {e}\"}\n", - "\n", - " try:\n", - " # get write_context\n", - " write_context = data.get(\"write_context\")\n", - " write_mode = write_context.get(\"write_mode\")\n", - " properties = write_context.get(\"properties\", {})\n", - " batch_size = write_context.get(\"batch_size\", 256)\n", - " # TBD, need to support more than one edge type\n", - " writer = NebulaWriter(data=algo_result, sink=\"nebulagraph_vertex\", config=nebula_config, engine=\"spark\")\n", - " writer.set_options(\n", - " tag=algo_name,\n", - " vid_field=\"_id\",\n", - " properties=properties,\n", - " batch_size=batch_size,\n", - " write_mode=write_mode,\n", - " )\n", - " response = writer.write()\n", - " except Exception as e:\n", - " # TBD, need to return error code, return empty json for now\n", - " print(e)\n", - " return {\"error\": f\"write failed: {e}\"}\n", - " # return reader result's stats, algo result's stats, writer result\n", - " return {\n", - " \"reader_result_stats\": list(map(lambda r: r.asDict(), df.data.summary().collect())),\n", - " \"algo_result_stats\": list(map(lambda r: r.asDict(), writer.raw_df.summary().collect())),\n", - " \"writer_result\": response is None or response,\n", - " }\n" + "## Run ngdi API gateway:" ] }, { @@ -189,9 +79,17 @@ "metadata": {}, "outputs": [], "source": [ - "app.run(host='0.0.0.0', port=9999)" + "from ngdi import ngdi_api_app as app\n", + "\n", + "app.run(port=9999, host='0.0.0.0')" ] }, + { + "cell_type": "markdown", + "id": "1b5b1c7d", + 
"metadata": {}, + "source": [] + }, { "attachments": {}, "cell_type": "markdown", diff --git a/examples/run_ngdi_api.py b/examples/run_ngdi_api.py new file mode 100644 index 0000000..86cebdf --- /dev/null +++ b/examples/run_ngdi_api.py @@ -0,0 +1,3 @@ +from ngdi import ngdi_api_app as app + +app.run(port=9999, host='0.0.0.0') diff --git a/ngdi/__init__.py b/ngdi/__init__.py index e193f15..6dfff4c 100644 --- a/ngdi/__init__.py +++ b/ngdi/__init__.py @@ -11,6 +11,7 @@ from ngdi.nebula_algo import NebulaAlgorithm from ngdi.nebula_gnn import NebulaGNN from ngdi.config import NebulaGraphConfig +from ngdi.ngdi_api.app import app as ngdi_api_app # export __all__ = ( @@ -19,4 +20,5 @@ "NebulaAlgorithm", "NebulaGNN", "NebulaGraphConfig", + "ngdi_api_app", ) diff --git a/ngdi/ngdi_api/__init__.py b/ngdi/ngdi_api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/ngdi/ngdi_api/__main__.py b/ngdi/ngdi_api/__main__.py new file mode 100644 index 0000000..a9eaa01 --- /dev/null +++ b/ngdi/ngdi_api/__main__.py @@ -0,0 +1,8 @@ +from ngdi import ngdi_api_app as app + +import os + + +def run(): + print(dir(app)) + app.run(host="0.0.0.0", port=int(os.environ.get("NGDI_PORT", 9999))) diff --git a/ngdi/ngdi_api/app.py b/ngdi/ngdi_api/app.py new file mode 100644 index 0000000..78bd09a --- /dev/null +++ b/ngdi/ngdi_api/app.py @@ -0,0 +1,124 @@ +import os + +from ngdi import NebulaReader, NebulaWriter +from ngdi.config import NebulaGraphConfig + +from flask import Flask, request + +app = Flask(__name__) + + +def get_nebulagraph_config(space="basketballplayer"): + # get credentials from env + graphd_hosts = os.getenv("GRAPHD_HOSTS", "graphd:9669") + metad_hosts = os.getenv("METAD_HOSTS", "metad0:9559,metad1:9559,metad2:9559") + user = os.getenv("USER", "root") + password = os.getenv("PASSWORD", "nebula") + + return NebulaGraphConfig( + graphd_hosts=graphd_hosts, + metad_hosts=metad_hosts, + user=user, + password=password, + space=space, + ) + + +@app.route("/api/v0/spark", methods=["GET"]) +def parallel_healthcheck(): + return {"status": "OK"} + + +@app.route("/api/v0/spark/", methods=["POST"]) +def parallel(algo_name): + data = request.get_json() + + try: + # get algo_context + algo_context = data.get("algo_context") + assert algo_context is not None, "algo_context should not be None" + assert algo_context.get("space") is not None, "space should not be None" + except Exception as e: + print(e) + return {"error": f"algo context parsing failed: {e}"} + space = algo_context.get("space") + nebula_config = get_nebulagraph_config(space=space) + + reader = NebulaReader(engine="spark") + # get read_context + try: + read_context = data.get("read_context") + read_mode = read_context.get("read_mode") + edges = read_context.get("edge_types") + edge_weights = read_context.get("edge_weights") + + assert ( + len(edges) == len(edge_weights) and len(edges) > 0 + ), "edges and edge_weights should have the same length and length > 0" + # TBD, it seems that the reader.scan() need to support more than one edge type + # https://github.com/wey-gu/nebulagraph-di/issues/19 + # need to query all and union them. + if read_mode == "scan": + reader.scan(edge=edges[0], props=edge_weights[0]) + elif read_mode == "query": + query = read_context.get("query") + assert query is not None, "query should not be None" + reader.query(query, edge=edges[0], props=edge_weights[0]) + # TODO(wey): need to revisit the query and scan API, to align them. 
+ # ref: https://github.com/vesoft-inc/nebula-algorithm/blob/master/nebula-algorithm/src/main/scala/com/vesoft/nebula/algorithm/Main.scala + # ref: https://github.com/vesoft-inc/nebula-algorithm/blob/master/nebula-algorithm/src/main/scala/com/vesoft/nebula/algorithm/reader/DataReader.scala + # ref: https://github.com/vesoft-inc/nebula-spark-connector/blob/master/example/src/main/scala/com/vesoft/nebula/examples/connector/NebulaSparkReaderExample.scala + df = reader.read() + except Exception as e: + # TBD, need to return error code, return empty json for now + print(e) + return {"error": f"read failed: {e}"} + try: + # ensure the algo_name is supported + assert algo_name in df.algo.get_all_algo(), f"{algo_name} is not supported" + + algo_config = dict(algo_context) + algo_config.pop("space") + algo_config.pop("name") + # call df.algo.algo_name(**algo_config) + algo_result = getattr(df.algo, algo_name)(**algo_config) + except Exception as e: + # TBD, need to return error code, return empty json for now + print(e) + return {"error": f"algo execution failed: {e}"} + + try: + # get write_context + write_context = data.get("write_context") + write_mode = write_context.get("write_mode") + properties = write_context.get("properties", {}) + batch_size = write_context.get("batch_size", 256) + # TBD, need to support more than one edge type + writer = NebulaWriter( + data=algo_result, + sink="nebulagraph_vertex", + config=nebula_config, + engine="spark", + ) + writer.set_options( + tag=algo_name, + vid_field="_id", + properties=properties, + batch_size=batch_size, + write_mode=write_mode, + ) + response = writer.write() + except Exception as e: + # TBD, need to return error code, return empty json for now + print(e) + return {"error": f"write failed: {e}"} + # return reader result's stats, algo result's stats, writer result + return { + "reader_result_stats": list( + map(lambda r: r.asDict(), df.data.summary().collect()) + ), + "algo_result_stats": list( + map(lambda r: r.asDict(), writer.raw_df.summary().collect()) + ), + "writer_result": response is None or response, + } diff --git a/pdm.lock b/pdm.lock index ec02f06..e3c2159 100644 --- a/pdm.lock +++ b/pdm.lock @@ -1,12 +1,77 @@ # This file is @generated by PDM. # It is not intended for manual editing. +[[package]] +name = "click" +version = "8.0.4" +requires_python = ">=3.6" +summary = "Composable command line interface toolkit" +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] + +[[package]] +name = "colorama" +version = "0.4.5" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +summary = "Cross-platform colored terminal text." + +[[package]] +name = "dataclasses" +version = "0.8" +requires_python = ">=3.6, <3.7" +summary = "A backport of the dataclasses module for Python 3.6" + [[package]] name = "decorator" version = "4.4.2" requires_python = ">=2.6, !=3.0.*, !=3.1.*" summary = "Decorators for Humans" +[[package]] +name = "flask" +version = "2.0.3" +requires_python = ">=3.6" +summary = "A simple framework for building complex web applications." 
+dependencies = [ + "Jinja2>=3.0", + "Werkzeug>=2.0", + "click>=7.1.2", + "itsdangerous>=2.0", +] + +[[package]] +name = "importlib-metadata" +version = "4.8.3" +requires_python = ">=3.6" +summary = "Read metadata from Python packages" +dependencies = [ + "typing-extensions>=3.6.4; python_version < \"3.8\"", + "zipp>=0.5", +] + +[[package]] +name = "itsdangerous" +version = "2.0.1" +requires_python = ">=3.6" +summary = "Safely pass data to untrusted environments and back." + +[[package]] +name = "jinja2" +version = "3.0.3" +requires_python = ">=3.6" +summary = "A very fast and expressive template engine." +dependencies = [ + "MarkupSafe>=2.0", +] + +[[package]] +name = "markupsafe" +version = "2.0.1" +requires_python = ">=3.6" +summary = "Safely add untrusted strings to HTML/XML markup." + [[package]] name = "networkx" version = "2.5.1" @@ -30,15 +95,135 @@ dependencies = [ "py4j==0.10.9.5", ] +[[package]] +name = "typing-extensions" +version = "4.1.1" +requires_python = ">=3.6" +summary = "Backported and Experimental Type Hints for Python 3.6+" + +[[package]] +name = "werkzeug" +version = "2.0.3" +requires_python = ">=3.6" +summary = "The comprehensive WSGI web application library." +dependencies = [ + "dataclasses; python_version < \"3.7\"", +] + +[[package]] +name = "zipp" +version = "3.6.0" +requires_python = ">=3.6" +summary = "Backport of pathlib-compatible object wrapper for zip files" + [metadata] lock_version = "4.1" -content_hash = "sha256:fc3df848c7def9029dac474e649472a4c1ca3c452fd124991898dfd5e24ece7c" +content_hash = "sha256:56676eb56b24ebcf1dbb6e2cbc8825447f15de7d3e3814c1893c9b396169e1a4" [metadata.files] +"click 8.0.4" = [ + {url = "https://files.pythonhosted.org/packages/4a/a8/0b2ced25639fb20cc1c9784de90a8c25f9504a7f18cd8b5397bd61696d7d/click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, + {url = "https://files.pythonhosted.org/packages/dd/cf/706c1ad49ab26abed0b77a2f867984c1341ed7387b8030a6aa914e2942a0/click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, +] +"colorama 0.4.5" = [ + {url = "https://files.pythonhosted.org/packages/2b/65/24d033a9325ce42ccbfa3ca2d0866c7e89cc68e5b9d92ecaba9feef631df/colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {url = "https://files.pythonhosted.org/packages/77/8b/7550e87b2d308a1b711725dfaddc19c695f8c5fa413c640b2be01662f4e6/colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, +] +"dataclasses 0.8" = [ + {url = "https://files.pythonhosted.org/packages/1f/12/7919c5d8b9c497f9180db15ea8ead6499812ea8264a6ae18766d93c59fe5/dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, + {url = "https://files.pythonhosted.org/packages/fe/ca/75fac5856ab5cfa51bbbcefa250182e50441074fdc3f803f6e76451fab43/dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, +] "decorator 4.4.2" = [ {url = "https://files.pythonhosted.org/packages/da/93/84fa12f2dc341f8cf5f022ee09e109961055749df2d0c75c5f98746cfe6c/decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, {url = "https://files.pythonhosted.org/packages/ed/1b/72a1821152d07cf1d8b6fce298aeb06a7eb90f4d6d41acec9861e7cc6df0/decorator-4.4.2-py2.py3-none-any.whl", hash = 
"sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, ] +"flask 2.0.3" = [ + {url = "https://files.pythonhosted.org/packages/84/9d/66347e6b3e2eb78647392d3969c23bdc2d8b2fdc32bd078c817c15cb81ad/Flask-2.0.3.tar.gz", hash = "sha256:e1120c228ca2f553b470df4a5fa927ab66258467526069981b3eb0a91902687d"}, + {url = "https://files.pythonhosted.org/packages/cd/77/59df23681f4fd19b7cbbb5e92484d46ad587554f5d490f33ef907e456132/Flask-2.0.3-py3-none-any.whl", hash = "sha256:59da8a3170004800a2837844bfa84d49b022550616070f7cb1a659682b2e7c9f"}, +] +"importlib-metadata 4.8.3" = [ + {url = "https://files.pythonhosted.org/packages/85/ed/e65128cc5cb1580f22ee3009d9187ecdfcc43ffb3b581fe854b24e87d8e7/importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, + {url = "https://files.pythonhosted.org/packages/a0/a1/b153a0a4caf7a7e3f15c2cd56c7702e2cf3d89b1b359d1f1c5e59d68f4ce/importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, +] +"itsdangerous 2.0.1" = [ + {url = "https://files.pythonhosted.org/packages/58/66/d6c5859dcac92b442626427a8c7a42322068c5cd5d4a463ce78b93f730b7/itsdangerous-2.0.1.tar.gz", hash = "sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0"}, + {url = "https://files.pythonhosted.org/packages/9c/96/26f935afba9cd6140216da5add223a0c465b99d0f112b68a4ca426441019/itsdangerous-2.0.1-py3-none-any.whl", hash = "sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c"}, +] +"jinja2 3.0.3" = [ + {url = "https://files.pythonhosted.org/packages/20/9a/e5d9ec41927401e41aea8af6d16e78b5e612bca4699d417f646a9610a076/Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {url = "https://files.pythonhosted.org/packages/91/a5/429efc6246119e1e3fbf562c00187d04e83e54619249eb732bb423efa6c6/Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, +] +"markupsafe 2.0.1" = [ + {url = "https://files.pythonhosted.org/packages/04/69/c31e837e4bb5532b02d297152464b2cb8a0edeb9bef762c015e9b4e95e16/MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {url = "https://files.pythonhosted.org/packages/08/dc/a5ed54fcc61f75343663ee702cbf69831dcec9b1a952ae21cf3d1fbc56ba/MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {url = "https://files.pythonhosted.org/packages/09/f1/5ca5da61ec071ce1e9c423f66a5bde508957601118be9cd37aeccfeab2f6/MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {url = "https://files.pythonhosted.org/packages/0a/1d/12eb0e1d1d7e0f745cd7bcf27400d75b53096ae14f9b86d3be02a468bc75/MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {url = "https://files.pythonhosted.org/packages/0c/55/d7b9059ed9affe3ebdaa288006e4b82839bdbc0ecf092cd5b61d0f0ba456/MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {url = 
"https://files.pythonhosted.org/packages/15/90/b63743e72c9ffc5988c7b1c04d14f9a32ae49574afe8a7fbea0ce538bda4/MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {url = "https://files.pythonhosted.org/packages/1b/f6/f774e745edd76eef70bf83062823be0dca95ee9c9211f18aec490892ab33/MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {url = "https://files.pythonhosted.org/packages/1d/c5/1d1b42c65f96ee7b0c81761260878d1a1dc0afdf259e434b7d7af88a80a3/MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {url = "https://files.pythonhosted.org/packages/1f/44/ada8e01854175525e8e139278c3a52fec0ef720307cbd670bca86b473b56/MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {url = "https://files.pythonhosted.org/packages/20/0e/e5d5ed4bad48827aede890787b8855c7dc08301be60f2eeb0ce17ec5c810/MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {url = "https://files.pythonhosted.org/packages/21/84/e090d999105fe0f3e1d955725ed2c9aeebc649ee83edab0e73d353d47e5d/MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {url = "https://files.pythonhosted.org/packages/27/c3/20f02d95e78756d59a4c02f179a6ee66e3283cc34e3051d436fd152d1e76/MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {url = "https://files.pythonhosted.org/packages/2b/6b/69dd812a582de48190e73c08a4f526842f880a4bb53fbc6859d896621b54/MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {url = "https://files.pythonhosted.org/packages/30/9e/4b7116f464a0151b86ce42b5185941eb74c207b38fe033f71f5e5d150356/MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {url = "https://files.pythonhosted.org/packages/35/3e/30d8e0f71de72ccb813ba82191dc445b6f2d8aaa08169a3814384e6f39c3/MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {url = "https://files.pythonhosted.org/packages/3b/41/f53e2ac439b309d8bb017d12ee6e7d393aa70c508448c1f30a7e5db9d69e/MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {url = "https://files.pythonhosted.org/packages/3f/43/72fd80844b2687e2c5aac95b64662ede122b8c3919b4c95488017ca8d2a9/MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {url = "https://files.pythonhosted.org/packages/44/e6/4e1f202ec01062c8b4d03af72f1aeb2ca8fc97f9f5d95b9173302ac4e5ad/MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {url = "https://files.pythonhosted.org/packages/50/99/06eccf68be0bff67ab9a0b90b5382c04769f9ad2e42cae5e5e92f99380cd/MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {url = "https://files.pythonhosted.org/packages/51/1e/45e25cd867fb79339c49086dad9794e11923dd6325251ae48c341b0a4271/MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {url = "https://files.pythonhosted.org/packages/51/c3/7154db2b7d5b24875e1f1c42bab87a46af688bd6a5c89a90c60053cb6b33/MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {url = "https://files.pythonhosted.org/packages/53/e8/601efa63c4058311a8bda7984a2fe554b9da574044967d7aee253661ee46/MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {url = "https://files.pythonhosted.org/packages/5a/98/3303496a5d19aabba67c443ba1df6ee1bec94549b3f8976f90c06a6942e6/MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {url = "https://files.pythonhosted.org/packages/5a/ff/34bdcd8cc794f692588de0b3f4c1aa7ec0d17716fda9d874836ed68775c1/MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {url = "https://files.pythonhosted.org/packages/5b/db/49785acd523bd5eef83d0e21594eec1c2d7d45afc473dcc85037243de673/MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {url = "https://files.pythonhosted.org/packages/66/66/b5891704372c9f5d97432933bdd7e9b5a0647fad9170c72bb7f486550c43/MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {url = "https://files.pythonhosted.org/packages/67/e9/579a3ad8d48f7680f887ff1f22cc6330f083de23ce32a8fa35f8acef477a/MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {url = "https://files.pythonhosted.org/packages/68/ba/7a5ca0f9b4239e6fd846dd54c0b5928187355fa62fbdbd13e1c5942afae7/MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {url = "https://files.pythonhosted.org/packages/6a/96/7a23b44f742384a866173502e19cc1ec13951085bbb4e24be504dfc6da9f/MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {url = "https://files.pythonhosted.org/packages/6f/83/eabfb8c6d60b096dc9ada378cf935809289c4d0327b74a60789bde77e1db/MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {url = "https://files.pythonhosted.org/packages/70/56/f81c0cfbc22882df36358ecdedc5474571183e5a5adde1e237079acee437/MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {url = "https://files.pythonhosted.org/packages/70/fc/5a7253a9c1c4e2a3feadb80a5def4563500daa4b2d4a39cae39483afa1b0/MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {url = 
"https://files.pythonhosted.org/packages/73/60/296031f365b3ae96732225203d864fac7b83a185ed1820c1c87b78e154bc/MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {url = "https://files.pythonhosted.org/packages/74/5d/3d5d08321661ca30c61eb897cd9fdf35a9a63ddddd094e65deb9862986b7/MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {url = "https://files.pythonhosted.org/packages/75/90/b780381ddf38e2afd07a04746b5d3158a085464f7c757fc62cd198aa5379/MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {url = "https://files.pythonhosted.org/packages/7a/e8/00c435416c9b0238dca6f883563b01c4cc532b2ba6aaf7268081f6238520/MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {url = "https://files.pythonhosted.org/packages/80/ec/e4272ac306ccc17062d253cb11f5c79c457f5e78b0e3c9f6adc989d507c0/MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {url = "https://files.pythonhosted.org/packages/81/8b/f28eac2790d49dde61f89ae9e007ac65002edc90bb2dd63c3b9e653820d2/MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {url = "https://files.pythonhosted.org/packages/8f/87/4668ce3963e942a9aa7b13212158e74bf063a2461138b7ed5a043ac6aa79/MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {url = "https://files.pythonhosted.org/packages/92/ac/94771b65ac9f77cf37e43b38516697bbc4e128ee152b68d596ae44c6c896/MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {url = "https://files.pythonhosted.org/packages/93/28/d42b954fb9189cf4b78b0b0a025cff9b2583f93b37d1a345768ade29e5dd/MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {url = "https://files.pythonhosted.org/packages/95/18/b7a45c16635acafdf6837a6fd4c71acfe5bad202884c6fcbae4ea0763dde/MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {url = "https://files.pythonhosted.org/packages/9c/dd/1b57e1514fd2f653ee31255b940baf0609741bc059565a7fe7c4e0fec46d/MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {url = "https://files.pythonhosted.org/packages/a3/01/8d5fd91ccc1a61b7a9e2803819b8b60c3bac37290bbbd3df33d8d548f9c1/MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {url = "https://files.pythonhosted.org/packages/a4/c8/9d2161b2080cb69c8834d1c34a399685347523acbfc923b203ad27bf1215/MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {url = "https://files.pythonhosted.org/packages/a6/d1/a7b97d0e000336c4e06bfce7e08dcb2b47fc5091146ee883dfac6cb4842e/MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {url = "https://files.pythonhosted.org/packages/a7/55/a576835b6b95af21d15f69eaf14c4fb1358fd48475f2b9813abd9654132e/MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {url = "https://files.pythonhosted.org/packages/ad/cd/650b1be2a81674939ef962b1f1b956e4a84116d69708c842667445e95408/MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {url = "https://files.pythonhosted.org/packages/ae/70/8dd5f2c0aab82431c9c619a2c4fbd1742fc0fb769d8d7b275ae1d03eb3a5/MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {url = "https://files.pythonhosted.org/packages/b9/87/cdfd4778d4b9ef0dc89c62b3cf0c181c9231e523a90d7ee254afcfe74557/MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {url = "https://files.pythonhosted.org/packages/bf/10/ff66fea6d1788c458663a84d88787bae15d45daa16f6b3ef33322a51fc7e/MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {url = "https://files.pythonhosted.org/packages/bf/a8/76f613645617c31dd4db1950057b0bab68e0b790c2dbb368c1971d38d87e/MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {url = "https://files.pythonhosted.org/packages/c1/39/9df65c006a88fce7bbd5ec3195b949b79477b1a325564f486c611c367893/MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {url = "https://files.pythonhosted.org/packages/c2/db/314df69668f582d5173922bded7b58126044bb77cfce6347c5d992074d2e/MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {url = "https://files.pythonhosted.org/packages/cc/f2/854d33eee85df681e61e22b52d8e83bef8b7425c0b9826212289f7885710/MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {url = "https://files.pythonhosted.org/packages/ce/a7/835a636047f4bb4fea31a682c18affad9795e864d800892bd7248485425e/MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {url = "https://files.pythonhosted.org/packages/d7/56/9d9c0dc2b0f5dc342ff9c7df31c523cc122947970b5ea943b2311be0c391/MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {url = "https://files.pythonhosted.org/packages/dd/8f/d0c570c851f70377ca6f344531fab4b6b01a99a9d2a801b25d6fd75525e5/MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {url = "https://files.pythonhosted.org/packages/e2/a9/eafee9babd4b3aed918d286fbe1c20d1a22d347b30d2bddb3c49919548fa/MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {url = 
"https://files.pythonhosted.org/packages/e4/9b/c7b55a2f587368d69eb6dc36e285010ab0bbb74323833d501921e08e2728/MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {url = "https://files.pythonhosted.org/packages/e6/57/e9d243b12918f22bc3aa1392db7821dcb643a120e87b3f8c9bc7e1ad33f1/MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {url = "https://files.pythonhosted.org/packages/e9/b8/e0e089d26667fbac3a473f78fc771b1cbffd30964816928e4864aac43357/MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {url = "https://files.pythonhosted.org/packages/eb/3b/1cddaf0338a031ef5c2e1d9d74f2d607d564748a933b44de6edfe7a2a880/MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {url = "https://files.pythonhosted.org/packages/ee/d4/f6d8700729ca202fd070e03d08bda349bb0689514c11732dcb4f0e7bd60f/MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {url = "https://files.pythonhosted.org/packages/f5/ff/9a35fc0f3fbda4cada0e559833b84627ddf44c45664741aed7da1b2468f2/MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {url = "https://files.pythonhosted.org/packages/f9/12/b63afcb3bf9f27fd347adef452f9a6e27dfe7107a8f2685afacc8e9c6592/MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {url = "https://files.pythonhosted.org/packages/fa/7f/50e0b7a7c13e056f7f1ea799a04a64c225a7ae784785f6b74e7515ea94e8/MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {url = "https://files.pythonhosted.org/packages/fc/d6/57f9a97e56447a1e340f8574836d3b636e2c14de304943836bd645fa9c7e/MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {url = "https://files.pythonhosted.org/packages/ff/e2/bfd4e230d609fc7c59cc1a69e1b9f65bda3f05b8cab41bb4437f3d44b108/MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, +] "networkx 2.5.1" = [ {url = "https://files.pythonhosted.org/packages/b0/21/adfbf6168631e28577e4af9eb9f26d75fe72b2bb1d33762a5f2c425e6c2a/networkx-2.5.1.tar.gz", hash = "sha256:109cd585cac41297f71103c3c42ac6ef7379f29788eb54cb751be5a663bb235a"}, {url = "https://files.pythonhosted.org/packages/f3/b7/c7f488101c0bb5e4178f3cde416004280fd40262433496830de8a8c21613/networkx-2.5.1-py3-none-any.whl", hash = "sha256:0635858ed7e989f4c574c2328380b452df892ae85084144c73d8cd819f0c4e06"}, @@ -50,3 +235,15 @@ content_hash = "sha256:fc3df848c7def9029dac474e649472a4c1ca3c452fd124991898dfd5e "pyspark 3.2.3" = [ {url = "https://files.pythonhosted.org/packages/de/ed/05d7fd2b3e8de9e590c4c0a15ff0c675001cb64f8ea98ffd5df1f80eaf3e/pyspark-3.2.3.tar.gz", hash = "sha256:eeefb33d8dcdd3ee60a5ed5b0028af06731992b4cc65dc6f06c85841ac8d6945"}, ] +"typing-extensions 4.1.1" = [ + {url = 
"https://files.pythonhosted.org/packages/45/6b/44f7f8f1e110027cf88956b59f2fad776cca7e1704396d043f89effd3a0e/typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {url = "https://files.pythonhosted.org/packages/b1/5a/8b5fbb891ef3f81fc923bf3cb4a578c0abf9471eb50ce0f51c74212182ab/typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, +] +"werkzeug 2.0.3" = [ + {url = "https://files.pythonhosted.org/packages/6c/a8/60514fade2318e277453c9588545d0c335ea3ea6440ce5cdabfca7f73117/Werkzeug-2.0.3.tar.gz", hash = "sha256:b863f8ff057c522164b6067c9e28b041161b4be5ba4d0daceeaa50a163822d3c"}, + {url = "https://files.pythonhosted.org/packages/f4/f3/22afbdb20cc4654b10c98043414a14057cd27fdba9d4ae61cea596000ba2/Werkzeug-2.0.3-py3-none-any.whl", hash = "sha256:1421ebfc7648a39a5c58c601b154165d05cf47a3cd0ccb70857cbdacf6c8f2b8"}, +] +"zipp 3.6.0" = [ + {url = "https://files.pythonhosted.org/packages/02/bf/0d03dbdedb83afec081fefe86cae3a2447250ef1a81ac601a9a56e785401/zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, + {url = "https://files.pythonhosted.org/packages/bd/df/d4a4974a3e3957fd1c1fa3082366d7fff6e428ddb55f074bf64876f8e8ad/zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, +] diff --git a/pyproject.toml b/pyproject.toml index e192d19..778d62b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,11 @@ [tool.pdm] +[tool.pdm.scripts] +ngdi-api = {call = "ngdi.ngdi_api.__main__:run"} + [project] name = "ngdi" -version = "0.2.3" +version = "0.2.4" description = "NebulaGraph Data Intelligence Suite" authors = [ {name = "Wey Gu", email = "weyl.gu@gmail.com"}, @@ -10,6 +13,7 @@ authors = [ dependencies = [ "pyspark>=3.2.3", "networkx>=2.5.1", + "flask>=2.0.3", ] requires-python = ">=3.6" readme = "README.md"
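
For reference, a minimal client-side sketch of the request the new gateway accepts, based on the payload parsing in `ngdi/ngdi_api/app.py` above. The gateway address `http://jupyter:9999` is a placeholder, and the `<algo_name>` path parameter (here `pagerank`) is inferred from `def parallel(algo_name)`; adjust both to the actual deployment:

```bash
# POST a pagerank job to the ngdi API Gateway (placeholder host/port)
curl -X POST \
  -H "Content-Type: application/json" \
  http://jupyter:9999/api/v0/spark/pagerank \
  -d '{
        "read_context": {
          "read_mode": "scan",
          "edge_types": ["follow"],
          "edge_weights": ["degree"]
        },
        "write_context": {"write_mode": "insert"},
        "algo_context": {
          "name": "pagerank",
          "space": "basketballplayer",
          "max_iter": 10,
          "reset_prob": 0.15
        }
      }'
```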