From 74d7210ad0f41665c1ec154808e964e94e2c77dd Mon Sep 17 00:00:00 2001 From: Natoandro Date: Sat, 25 May 2024 06:11:12 +0300 Subject: [PATCH 01/35] update deployed version --- Cargo.lock | 115 ++++----- dev/lock.yml | 2 +- examples/templates/deno/api/example.ts | 6 +- examples/templates/deno/compose.yml | 2 +- examples/templates/node/compose.yml | 2 +- examples/templates/node/package.json | 2 +- examples/templates/node/pnpm-lock.yaml | 315 +++++++++++++++++++++++++ examples/templates/python/compose.yml | 2 +- 8 files changed, 371 insertions(+), 75 deletions(-) create mode 100644 examples/templates/node/pnpm-lock.yaml diff --git a/Cargo.lock b/Cargo.lock index 2abe7cea69..418f178134 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -589,13 +589,13 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.2.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136d4d23bcc79e27423727b36823d86233aad06dfea531837b038394d11e9928" +checksum = "1ca33f4bc4ed1babef42cad36cc1f51fa88be00420404e5b1e80ab1b18f7678c" dependencies = [ "concurrent-queue", - "event-listener 5.3.0", - "event-listener-strategy 0.5.1", + "event-listener 4.0.3", + "event-listener-strategy", "futures-core", "pin-project-lite", ] @@ -669,7 +669,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" dependencies = [ "event-listener 4.0.3", - "event-listener-strategy 0.4.0", + "event-listener-strategy", "pin-project-lite", ] @@ -755,9 +755,9 @@ dependencies = [ [[package]] name = "async-task" -version = "4.7.0" +version = "4.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" +checksum = "b4eb2cdb97421e01129ccb49169d8279ed21e829929144f4a22a6e54ac549ca1" [[package]] name = "async-trait" @@ -2613,7 +2613,7 @@ dependencies = [ "cooked-waker", "deno_core_icudata", "deno_ops", - "deno_unsync 0.3.3", + "deno_unsync 0.3.4", "futures", "libc", "log", @@ -2795,7 +2795,7 @@ dependencies = [ "data-url", "deno_ast", "deno_semver", - "deno_unsync 0.3.3", + "deno_unsync 0.3.4", "encoding_rs", "futures", "import_map", @@ -3209,9 +3209,9 @@ dependencies = [ [[package]] name = "deno_unsync" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d79c7af81e0a5ac75cff7b2fff4d1896e2bff694c688258edf21ef8a519736" +checksum = "7557a5e9278b9a5cc8056dc37062ea4344770bda4eeb5973c7cbb7ebf636b9a4" dependencies = [ "tokio", ] @@ -4170,17 +4170,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "event-listener" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d9944b8ca13534cdfb2800775f8dd4902ff3fc75a50101466decadfdf322a24" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - [[package]] name = "event-listener-strategy" version = "0.4.0" @@ -4191,16 +4180,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "event-listener-strategy" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "332f51cb23d20b0de8458b86580878211da09bcd4503cb579c225b3d124cabb3" -dependencies = [ - "event-listener 5.3.0", - "pin-project-lite", -] - [[package]] name = "eyre" version = "0.6.12" @@ -4327,9 +4306,9 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.8" +version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "38793c55593b33412e3ae40c2c9781ffaa6f438f6f8c10f24e71846fbd7ae01e" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "filetime" @@ -7626,13 +7605,13 @@ checksum = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" [[package]] name = "os_info" -version = "3.8.2" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" +checksum = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e" dependencies = [ "log", "serde 1.0.202", - "windows-sys 0.52.0", + "winapi", ] [[package]] @@ -9549,11 +9528,11 @@ checksum = "ece8e78b2f38ec51c51f5d475df0a7187ba5111b2a28bdc761ee05b075d40a71" [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] @@ -9674,9 +9653,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.10.0" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -9687,9 +9666,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.10.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -12755,12 +12734,6 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" -[[package]] -name = "wasite" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" - [[package]] name = "wasm-bindgen" version = "0.2.92" @@ -12854,6 +12827,15 @@ dependencies = [ "leb128", ] +[[package]] +name = "wasm-encoder" +version = "0.208.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6425e84e42f7f558478e40ecc2287912cb319f2ca68e5c0bb93c61d4fc63fa17" +dependencies = [ + "leb128", +] + [[package]] name = "wasm-metadata" version = "0.201.0" @@ -13239,24 +13221,24 @@ dependencies = [ [[package]] name = "wast" -version = "207.0.0" +version = "208.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e40be9fd494bfa501309487d2dc0b3f229be6842464ecbdc54eac2679c84c93" +checksum = "bc00b3f023b4e2ccd2e054e240294263db52ae962892e6523e550783c83a67f1" dependencies = [ "bumpalo", "leb128", "memchr", "unicode-width", - "wasm-encoder 0.207.0", + "wasm-encoder 0.208.1", ] [[package]] name = "wat" -version = "1.207.0" +version = "1.208.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eb2b15e2d5f300f5e1209e7dc237f2549edbd4203655b6c6cab5cf180561ee7" +checksum = "58ed38e59176550214c025ea2bd0eeefd8e86b92d0af6698d5ba95020ec2e07b" dependencies = [ 
- "wast 207.0.0", + "wast 208.0.1", ] [[package]] @@ -13390,20 +13372,19 @@ dependencies = [ [[package]] name = "whoami" -version = "1.5.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" dependencies = [ - "redox_syscall 0.4.1", - "wasite", + "wasm-bindgen", "web-sys", ] [[package]] name = "widestring" -version = "1.1.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" +checksum = "653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" [[package]] name = "wiggle" @@ -13465,11 +13446,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -14042,9 +14023,9 @@ dependencies = [ [[package]] name = "xml-rs" -version = "0.8.20" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791978798f0597cfc70478424c2b4fdc2b7a8024aaff78497ef00f24ef674193" +checksum = "5a56c84a8ccd4258aed21c92f70c0f6dea75356b6892ae27c24139da456f9336" [[package]] name = "xtask" diff --git a/dev/lock.yml b/dev/lock.yml index 9279b27ded..a90ac86a7d 100644 --- a/dev/lock.yml +++ b/dev/lock.yml @@ -100,7 +100,7 @@ dev: TYPEGRAPH_VERSION: 0.0.3 PRISMA_VERSION: 5.5.2 METATYPE_VERSION: 0.4.2 - PUBLISHED_VERSION: 0.4.1 + PUBLISHED_VERSION: 0.4.2 WASM_OPT_VERSION: 0.116.1 MOLD_VERSION: v2.4.0 CMAKE_VERSION: 3.28.0-rc6 diff --git a/examples/templates/deno/api/example.ts b/examples/templates/deno/api/example.ts index bc3f790846..f5086a79b3 100644 --- a/examples/templates/deno/api/example.ts +++ b/examples/templates/deno/api/example.ts @@ -1,6 +1,6 @@ -import { Policy, t, typegraph } from "npm:@typegraph/sdk@0.4.1/index.js"; -import { PythonRuntime } from "npm:@typegraph/sdk@0.4.1/runtimes/python.js"; -import { DenoRuntime } from "npm:@typegraph/sdk@0.4.1/runtimes/deno.js"; +import { Policy, t, typegraph } from "npm:@typegraph/sdk@0.4.2/index.js"; +import { PythonRuntime } from "npm:@typegraph/sdk@0.4.2/runtimes/python.js"; +import { DenoRuntime } from "npm:@typegraph/sdk@0.4.2/runtimes/deno.js"; await typegraph("example", (g) => { const pub = Policy.public(); diff --git a/examples/templates/deno/compose.yml b/examples/templates/deno/compose.yml index 55b25b10ab..676ded3087 100644 --- a/examples/templates/deno/compose.yml +++ b/examples/templates/deno/compose.yml @@ -1,6 +1,6 @@ services: typegate: - image: ghcr.io/metatypedev/typegate:v0.4.1 + image: ghcr.io/metatypedev/typegate:v0.4.2 restart: always ports: - "7890:7890" diff --git a/examples/templates/node/compose.yml b/examples/templates/node/compose.yml index 55b25b10ab..676ded3087 100644 --- a/examples/templates/node/compose.yml +++ b/examples/templates/node/compose.yml @@ -1,6 +1,6 @@ services: typegate: - image: ghcr.io/metatypedev/typegate:v0.4.1 + image: ghcr.io/metatypedev/typegate:v0.4.2 restart: always ports: - "7890:7890" diff --git a/examples/templates/node/package.json b/examples/templates/node/package.json index 
8a5418b7e9..4507e83e7c 100644 --- a/examples/templates/node/package.json +++ b/examples/templates/node/package.json @@ -6,7 +6,7 @@ "dev": "MCLI_LOADER_CMD='npm x tsx' meta dev" }, "dependencies": { - "@typegraph/sdk": "^0.4.1" + "@typegraph/sdk": "^0.4.2" }, "devDependencies": { "tsx": "^3.13.0", diff --git a/examples/templates/node/pnpm-lock.yaml b/examples/templates/node/pnpm-lock.yaml new file mode 100644 index 0000000000..719e5dc08f --- /dev/null +++ b/examples/templates/node/pnpm-lock.yaml @@ -0,0 +1,315 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@typegraph/sdk': + specifier: ^0.4.2 + version: 0.4.2 + devDependencies: + tsx: + specifier: ^3.13.0 + version: 3.14.0 + typescript: + specifier: ^5.2.2 + version: 5.4.5 + +packages: + + '@esbuild/android-arm64@0.18.20': + resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.18.20': + resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.18.20': + resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.18.20': + resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.18.20': + resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.18.20': + resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.18.20': + resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.18.20': + resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.18.20': + resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.18.20': + resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.18.20': + resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.18.20': + resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.18.20': + resolution: {integrity: 
sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.18.20': + resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.18.20': + resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.18.20': + resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-x64@0.18.20': + resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-x64@0.18.20': + resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/sunos-x64@0.18.20': + resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.18.20': + resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.18.20': + resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.18.20': + resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@typegraph/sdk@0.4.2': + resolution: {integrity: sha512-6IDm7V6XyibTJXhH3bhz7W7QYYkYFVTj/ycMOem/Cq9lQ4WN6pHO3yTzfQsldZci1A6U9JHKVFaunjhgNyo1eA==} + + buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + + esbuild@0.18.20: + resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} + engines: {node: '>=12'} + hasBin: true + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + get-tsconfig@4.7.5: + resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + + source-map-support@0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + tsx@3.14.0: + resolution: {integrity: sha512-xHtFaKtHxM9LOklMmJdI3BEnQq/D5F73Of2E1GDrITi9sgoVkvIsrQUTY1G8FlmGtA+awCI4EBlTRRYxkL2sRg==} + hasBin: true + + 
typescript@5.4.5: + resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} + engines: {node: '>=14.17'} + hasBin: true + +snapshots: + + '@esbuild/android-arm64@0.18.20': + optional: true + + '@esbuild/android-arm@0.18.20': + optional: true + + '@esbuild/android-x64@0.18.20': + optional: true + + '@esbuild/darwin-arm64@0.18.20': + optional: true + + '@esbuild/darwin-x64@0.18.20': + optional: true + + '@esbuild/freebsd-arm64@0.18.20': + optional: true + + '@esbuild/freebsd-x64@0.18.20': + optional: true + + '@esbuild/linux-arm64@0.18.20': + optional: true + + '@esbuild/linux-arm@0.18.20': + optional: true + + '@esbuild/linux-ia32@0.18.20': + optional: true + + '@esbuild/linux-loong64@0.18.20': + optional: true + + '@esbuild/linux-mips64el@0.18.20': + optional: true + + '@esbuild/linux-ppc64@0.18.20': + optional: true + + '@esbuild/linux-riscv64@0.18.20': + optional: true + + '@esbuild/linux-s390x@0.18.20': + optional: true + + '@esbuild/linux-x64@0.18.20': + optional: true + + '@esbuild/netbsd-x64@0.18.20': + optional: true + + '@esbuild/openbsd-x64@0.18.20': + optional: true + + '@esbuild/sunos-x64@0.18.20': + optional: true + + '@esbuild/win32-arm64@0.18.20': + optional: true + + '@esbuild/win32-ia32@0.18.20': + optional: true + + '@esbuild/win32-x64@0.18.20': + optional: true + + '@typegraph/sdk@0.4.2': {} + + buffer-from@1.1.2: {} + + esbuild@0.18.20: + optionalDependencies: + '@esbuild/android-arm': 0.18.20 + '@esbuild/android-arm64': 0.18.20 + '@esbuild/android-x64': 0.18.20 + '@esbuild/darwin-arm64': 0.18.20 + '@esbuild/darwin-x64': 0.18.20 + '@esbuild/freebsd-arm64': 0.18.20 + '@esbuild/freebsd-x64': 0.18.20 + '@esbuild/linux-arm': 0.18.20 + '@esbuild/linux-arm64': 0.18.20 + '@esbuild/linux-ia32': 0.18.20 + '@esbuild/linux-loong64': 0.18.20 + '@esbuild/linux-mips64el': 0.18.20 + '@esbuild/linux-ppc64': 0.18.20 + '@esbuild/linux-riscv64': 0.18.20 + '@esbuild/linux-s390x': 0.18.20 + '@esbuild/linux-x64': 0.18.20 + '@esbuild/netbsd-x64': 0.18.20 + '@esbuild/openbsd-x64': 0.18.20 + '@esbuild/sunos-x64': 0.18.20 + '@esbuild/win32-arm64': 0.18.20 + '@esbuild/win32-ia32': 0.18.20 + '@esbuild/win32-x64': 0.18.20 + + fsevents@2.3.3: + optional: true + + get-tsconfig@4.7.5: + dependencies: + resolve-pkg-maps: 1.0.0 + + resolve-pkg-maps@1.0.0: {} + + source-map-support@0.5.21: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + tsx@3.14.0: + dependencies: + esbuild: 0.18.20 + get-tsconfig: 4.7.5 + source-map-support: 0.5.21 + optionalDependencies: + fsevents: 2.3.3 + + typescript@5.4.5: {} diff --git a/examples/templates/python/compose.yml b/examples/templates/python/compose.yml index 55b25b10ab..676ded3087 100644 --- a/examples/templates/python/compose.yml +++ b/examples/templates/python/compose.yml @@ -1,6 +1,6 @@ services: typegate: - image: ghcr.io/metatypedev/typegate:v0.4.1 + image: ghcr.io/metatypedev/typegate:v0.4.2 restart: always ports: - "7890:7890" From a3463317b2787e2ec3709d0c0b967d30f150759f Mon Sep 17 00:00:00 2001 From: Natoandro Date: Wed, 29 May 2024 15:29:00 +0300 Subject: [PATCH 02/35] (cli) new actors --- meta-cli/src/cli/deploy.rs | 517 +++++++++++---------- meta-cli/src/com/store.rs | 1 + meta-cli/src/deploy/actors/discovery.rs | 28 +- meta-cli/src/deploy/actors/loader.rs | 10 +- meta-cli/src/deploy/actors/mod.rs | 2 + meta-cli/src/deploy/actors/task.rs | 401 ++++++++++++++++ meta-cli/src/deploy/actors/task/action.rs | 205 ++++++++ 
meta-cli/src/deploy/actors/task/command.rs | 50 ++ meta-cli/src/deploy/actors/task_manager.rs | 311 +++++++++++++ meta-cli/src/deploy/actors/watcher.rs | 93 ++-- meta-cli/src/deploy/push/pusher.rs | 1 + meta-cli/src/main.rs | 6 +- meta-cli/src/typegraph/loader/mod.rs | 123 +++-- typegate/src/services/artifact_service.ts | 1 + 14 files changed, 1415 insertions(+), 334 deletions(-) create mode 100644 meta-cli/src/deploy/actors/task.rs create mode 100644 meta-cli/src/deploy/actors/task/action.rs create mode 100644 meta-cli/src/deploy/actors/task/command.rs create mode 100644 meta-cli/src/deploy/actors/task_manager.rs diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 7d4f29a3f8..11ab154555 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -1,23 +1,26 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - +use self::actors::task::action::DeployAction; +use self::actors::task::action::DeployActionGenerator; +use self::actors::task::TaskConfig; +use self::actors::task_manager::{self, StopReason, TaskReason}; use super::{Action, ConfigArgs, NodeArgs}; use crate::com::store::{Command, Endpoint, MigrationAction, ServerStore}; use crate::config::Config; use crate::deploy::actors; use crate::deploy::actors::console::{Console, ConsoleActor}; use crate::deploy::actors::discovery::DiscoveryActor; -use crate::deploy::actors::loader::{ - self, LoaderActor, LoaderEvent, ReloadModule, ReloadReason, StopBehavior, -}; -use crate::deploy::actors::watcher::WatcherActor; +use crate::deploy::actors::loader::{self, LoaderEvent, ReloadModule, ReloadReason, StopBehavior}; +use crate::deploy::actors::task::action::TaskAction; +use crate::deploy::actors::task_manager::TaskManager; +use crate::deploy::actors::watcher::{self, WatcherActor}; use crate::deploy::push::pusher::PushResult; +use crate::interlude::*; use crate::secrets::{RawSecrets, Secrets}; -use actix::prelude::*; use actix_web::dev::ServerHandle; use clap::Parser; +use futures::channel::oneshot; use normpath::PathExt; use owo_colors::OwoColorize; use tokio::sync::mpsc; @@ -103,7 +106,7 @@ pub struct Deploy { } impl Deploy { - #[tracing::instrument] + #[tracing::instrument(level = "debug")] pub async fn new(deploy: &DeploySubcommand, args: &ConfigArgs) -> Result { let dir = args.dir(); @@ -114,6 +117,10 @@ impl Deploy { let node_config = config.node(&deploy.node, &deploy.target); let secrets = Secrets::load_from_node_config(&node_config); + debug!( + "validating configuration for target {:?}", + deploy.target.yellow() + ); let node = node_config .build(&dir) .await @@ -155,12 +162,12 @@ impl Deploy { struct CtrlCHandlerData { watcher: Addr, - loader: Addr, + task_manager: Addr>, } #[async_trait] impl Action for DeploySubcommand { - #[tracing::instrument] + #[tracing::instrument(level = "debug")] async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()> { let deploy = Deploy::new(self, &args).await?; @@ -182,12 +189,14 @@ impl Action for DeploySubcommand { } if deploy.options.watch { + info!("running in watch mode"); // watch the content of a folder if self.file.is_some() { bail!("Cannot use --file in watch mode"); } watch_mode::enter_watch_mode(deploy).await?; } else { + trace!("running in default mode"); // deploy a single file let deploy = default_mode::DefaultMode::init(deploy).await?; deploy.run().await?; @@ -201,16 +210,15 @@ impl Action for DeploySubcommand { mod default_mode { //! 
non-watch mode - use default_mode::actors::loader::LoadModule; - use futures::channel::oneshot; use super::*; pub struct DefaultMode { deploy: Deploy, console: Addr, - loader: Addr, - loader_event_rx: mpsc::UnboundedReceiver, + task_manager: Addr>, + report_rx: oneshot::Receiver>, + // loader_event_rx: mpsc::UnboundedReceiver, } impl DefaultMode { @@ -222,13 +230,18 @@ mod default_mode { ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); - let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); + // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); - let loader = LoaderActor::new( - Arc::clone(&deploy.config), - console.clone(), - loader_event_tx, + let (report_tx, report_rx) = oneshot::channel(); + + let task_config = TaskConfig::init(deploy.base_dir.clone()); + let action_generator = DeployActionGenerator::new(task_config); + + let task_manager = TaskManager::new( + action_generator, deploy.max_parallel_loads.unwrap_or_else(num_cpus::get), + report_tx, + console.clone(), ) .auto_stop() .start(); @@ -236,8 +249,8 @@ mod default_mode { Ok(Self { deploy, console, - loader, - loader_event_rx, + task_manager, + report_rx, }) } @@ -245,20 +258,26 @@ mod default_mode { debug!(file = ?self.deploy.file); { - let loader = self.loader.clone(); + let task_manager = self.task_manager.clone(); ctrlc::set_handler(move || { - loader.do_send(loader::TryStop(StopBehavior::ExitSuccess)); + debug!("CTRL-C handler"); + task_manager.do_send(task_manager::message::Stop); + // loader.do_send(loader::TryStop(StopBehavior::ExitSuccess)); }) } .context("setting Ctrl-C handler")?; - let _discovery = if let Some(file) = self.deploy.file.clone() { - self.loader.do_send(LoadModule(file.to_path_buf().into())); + let _discovery = if let Some(path) = self.deploy.file.clone() { + self.task_manager.do_send(task_manager::message::AddTask { + path, + reason: TaskReason::Discovery, + }); + // self.loader.do_send(LoadModule(file.to_path_buf().into())); None } else { Some( DiscoveryActor::new( Arc::clone(&self.deploy.config), - self.loader.clone(), + self.task_manager.clone(), self.console.clone(), Arc::clone(&self.deploy.base_dir), ) @@ -266,141 +285,151 @@ mod default_mode { ) }; - let loader = self.loader.clone(); - let stopped = loader::stopped(loader); - self.handle_loaded_typegraphs().await??; + // let stopped = loader::stopped(loader); + // self.handle_loaded_typegraphs().await??; + let report = self.report_rx.await?; - match stopped.await { - Ok(StopBehavior::Restart) => unreachable!("LoaderActor should not restart"), - Ok(StopBehavior::ExitSuccess) => Ok(()), - Ok(StopBehavior::ExitFailure(msg)) => bail!("LoaderActor exit failure: {msg}"), - Err(err) => panic!("Loader actor stopped unexpectedly: {err:?}"), - } - } + // TODO display report entries - #[tracing::instrument(skip(self))] - fn handle_loaded_typegraphs(self) -> oneshot::Receiver> { - let mut event_rx = self.loader_event_rx; - let console = self.console.clone(); - let (tx, rx) = oneshot::channel(); - let fut = async move { - let mut errors = vec![]; - while let Some(event) = event_rx.recv().await { - match event { - LoaderEvent::Typegraph(tg_infos) => { - let responses = match tg_infos.get_responses_or_fail() { - Ok(val) => val, - Err(err) => { - console.error(format!( - "failed pushing typegraph at {:?}: {err:#}", - tg_infos.path.display().cyan(), - )); - errors.push((tg_infos.path.clone(), err)); - continue; - } - }; - for (name, res) in responses.iter() { - match PushResult::new( - self.console.clone(), 
- self.loader.clone(), - res.clone(), - ) { - Ok(push) => push.finalize().await.unwrap(), - Err(err) => { - console.error(format!( - "failed pushing typegraph {:?} at {:?}: {err:#}", - name.yellow(), - tg_infos.path.display().cyan(), - )); - errors.push((tg_infos.path.clone(), err)); - } - } - } - } - LoaderEvent::Stopped(b) => { - if let StopBehavior::ExitFailure(msg) = b { - error!("LoaderActor exit failure: {}", msg.red()); - } - } - } + match report.stop_reason { + StopReason::Natural => Ok(()), + StopReason::Restart => { + unreachable!("TaskManager should not restart on the default mode") } - trace!("typegraph channel closed."); - if errors.is_empty() { - tx.send(Ok(())).unwrap_or_log(); - } else { - tx.send(Err(errors.into_iter().fold( - ferr!("loader encountered errors").suppress_backtrace(true), - |report, (path, err)| { - report.section( - format!("{}", format!("{err:#}").red()) - .header(format!("{}:", path.display().purple())), - ) - }, - ))) - .unwrap_or_log(); + StopReason::Manual => Err(eyre::eyre!("tasks manually stopped")), + StopReason::ManualForced => Err(eyre::eyre!("tasks manually stopped (forced)")), + StopReason::Error => { + // error should have already been reported + Err(eyre::eyre!("failed")) } - // pusher address will be dropped when both loops are done - }; - Arbiter::current().spawn(fut.in_current_span()); - rx + } } + + // #[tracing::instrument(skip(self))] + // fn handle_loaded_typegraphs(self) -> oneshot::Receiver> { + // let mut event_rx = self.loader_event_rx; + // let console = self.console.clone(); + // let (tx, rx) = oneshot::channel(); + // let fut = async move { + // let mut errors = vec![]; + // while let Some(event) = event_rx.recv().await { + // match event { + // LoaderEvent::Typegraph(tg_infos) => { + // let responses = match tg_infos.get_responses_or_fail() { + // Ok(val) => val, + // Err(err) => { + // console.error(format!( + // "failed pushing typegraph at {:?}: {err:#}", + // tg_infos.path.display().cyan(), + // )); + // errors.push((tg_infos.path.clone(), err)); + // continue; + // } + // }; + // for (name, res) in responses.iter() { + // match PushResult::new( + // self.console.clone(), + // self.loader.clone(), + // res.clone(), + // ) { + // Ok(push) => push.finalize().await.unwrap(), + // Err(err) => { + // console.error(format!( + // "failed pushing typegraph {:?} at {:?}: {err:#}", + // name.yellow(), + // tg_infos.path.display().cyan(), + // )); + // errors.push((tg_infos.path.clone(), err)); + // } + // } + // } + // } + // LoaderEvent::Stopped(b) => { + // if let StopBehavior::ExitFailure(msg) = b { + // error!("LoaderActor exit failure: {}", msg.red()); + // } + // } + // } + // } + // trace!("typegraph channel closed."); + // if errors.is_empty() { + // tx.send(Ok(())).unwrap_or_log(); + // } else { + // tx.send(Err(errors.into_iter().fold( + // ferr!("loader encountered errors").suppress_backtrace(true), + // |report, (path, err)| { + // report.section( + // format!("{}", format!("{err:#}").red()) + // .header(format!("{}:", path.display().purple())), + // ) + // }, + // ))) + // .unwrap_or_log(); + // } + // // pusher address will be dropped when both loops are done + // }; + // Arbiter::current().spawn(fut.in_current_span()); + // rx + // } } } mod watch_mode { - - use watch_mode::actors::loader::LoadModule; - - use crate::deploy::push::pusher::RetryManager; - use super::*; #[tracing::instrument] pub async fn enter_watch_mode(deploy: Deploy) -> Result<()> { let console = ConsoleActor::new(Arc::clone(&deploy.config)).start(); - 
let ctrlc_handler_data = Arc::new(Mutex::new(None)); + let ctrlc_handler_data = Arc::new(std::sync::Mutex::new(None)); let data = ctrlc_handler_data.clone(); ctrlc::set_handler(move || { let mut data = data.lock().unwrap(); - if let Some(CtrlCHandlerData { watcher, loader }) = data.take() { - watcher.do_send(actors::watcher::Stop); - loader.do_send(loader::TryStop(StopBehavior::ExitSuccess)); + if let Some(CtrlCHandlerData { + watcher, + task_manager, + }) = data.take() + { + watcher.do_send(watcher::message::Stop); + task_manager.do_send(task_manager::message::Stop); } }) .context("setting Ctrl-C handler")?; + let task_config = TaskConfig::init(deploy.base_dir.clone()); + let action_generator = DeployActionGenerator::new(task_config); + loop { let mut secrets = deploy.secrets.clone(); secrets.apply_overrides(&deploy.options.secrets)?; ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); - let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); + // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); - let loader = LoaderActor::new( - Arc::clone(&deploy.config), - console.clone(), - loader_event_tx, + let (report_tx, report_rx) = oneshot::channel(); + + let task_manager = TaskManager::new( + action_generator.clone(), deploy.max_parallel_loads.unwrap_or_else(num_cpus::get), + report_tx, + console.clone(), ) .start(); let _discovery = DiscoveryActor::new( Arc::clone(&deploy.config), - loader.clone(), + task_manager.clone(), console.clone(), Arc::clone(&deploy.base_dir), ) .start(); - let (watch_event_tx, watch_event_rx) = mpsc::unbounded_channel(); - let watcher = WatcherActor::new( Arc::clone(&deploy.config), deploy.base_dir.clone(), - watch_event_tx, + task_manager.clone(), console.clone(), )? .start(); @@ -408,154 +437,154 @@ mod watch_mode { let actor_system = ActorSystem { console: console.clone(), watcher, - loader: loader.clone(), + task_manager: task_manager.clone(), }; - actor_system.handle_loaded_typegraphs(loader_event_rx); - actor_system.handle_watch_events(watch_event_rx); + // actor_system.handle_loaded_typegraphs(loader_event_rx); + // actor_system.handle_watch_events(watch_event_rx); actor_system.update_ctrlc_handler(ctrlc_handler_data.clone()); - // TODO wait for push lifecycle - match loader::stopped(loader).await { - Ok(StopBehavior::ExitSuccess) => { - break; + let report = report_rx.await?; + + match report.stop_reason { + StopReason::Natural => { + unreachable!("TaskManager should not stop naturally on watch mode") } - Ok(StopBehavior::Restart) => { + StopReason::Restart => { continue; } - Ok(StopBehavior::ExitFailure(_)) => { - break; + StopReason::Manual => { + return Err(eyre::eyre!("tasks manually stopped")); } - Err(e) => { - panic!("Loader actor stopped unexpectedly: {e:?}"); + StopReason::ManualForced => { + return Err(eyre::eyre!("tasks manually stopped (forced)")); } + StopReason::Error => return Err(eyre::eyre!("failed")), } } - - Ok(()) } struct ActorSystem { console: Addr, watcher: Addr, - loader: Addr, + task_manager: Addr>, } impl ActorSystem { - #[tracing::instrument(skip(self))] - fn handle_loaded_typegraphs(&self, event_rx: mpsc::UnboundedReceiver) { - let console = self.console.clone(); - let loader = self.loader.clone(); - let fut = async move { - let mut event_rx = event_rx; - while let Some(event) = event_rx.recv().await { - match event { - LoaderEvent::Typegraph(tg_infos) => { - let responses = ServerStore::get_responses_or_fail(&tg_infos.path) - .unwrap_or_log() - .as_ref() - .to_owned(); - for 
(name, response) in responses.into_iter() { - match PushResult::new(console.clone(), loader.clone(), response) { - Ok(push) => { - if let Err(err) = push.finalize().await { - panic!("{err:#}"); - } - RetryManager::clear_counter(&tg_infos.path); - } - Err(err) => { - let tg_path = tg_infos.path.clone(); - console.error(format!( - "failed pushing typegraph {name:?} at {tg_path:?}: {err:#}", - )); - if let Some(delay) = RetryManager::next_delay(&tg_path) { - console.info(format!( - "retry {}/{}, retrying after {}s of {:?}", - delay.retry, - delay.max, - delay.duration.as_secs(), - tg_path.display(), - )); - tokio::time::sleep(delay.duration).await; - loader.do_send(LoadModule(Arc::new(tg_path))); - } - } - } - } - } - LoaderEvent::Stopped(b) => { - if let StopBehavior::ExitFailure(msg) = b { - panic!("{msg}"); - } - } - } - } - trace!("Typegraph channel closed."); - // pusher address will be dropped when both loops are done - }; - Arbiter::current().spawn(fut.in_current_span()); - } - - #[tracing::instrument(skip(self))] - fn handle_watch_events( - &self, - watch_event_rx: mpsc::UnboundedReceiver, - ) { - let console = self.console.clone(); - let watcher = self.watcher.clone(); - let loader = self.loader.clone(); - let fut = async move { - let mut watch_event_rx = watch_event_rx; - while let Some(event) = watch_event_rx.recv().await { - use actors::watcher::Event as E; - match event { - E::ConfigChanged => { - RetryManager::reset(); - - console.warning("metatype configuration file changed".to_string()); - console.warning("reloading everything".to_string()); - - loader.do_send(loader::TryStop(StopBehavior::Restart)); - watcher.do_send(actors::watcher::Stop); - } - E::TypegraphModuleChanged { typegraph_module } => { - RetryManager::clear_counter(&typegraph_module); - loader.do_send(ReloadModule( - typegraph_module.into(), - ReloadReason::FileChanged, - )); - } - E::TypegraphModuleDeleted { typegraph_module } => { - RetryManager::clear_counter(&typegraph_module); - - // TODO internally by the watcher?? - watcher.do_send(actors::watcher::RemoveTypegraph( - typegraph_module.clone(), - )); - // TODO delete typegraph in typegate?? 
- } - E::DependencyChanged { - typegraph_module, - dependency_path, - } => { - RetryManager::clear_counter(&typegraph_module); - - loader.do_send(ReloadModule( - typegraph_module.into(), - ReloadReason::DependencyChanged(dependency_path), - )); - } - } - } - trace!("watcher event channel closed"); - }; - Arbiter::current().spawn(fut.in_current_span()); - } - - fn update_ctrlc_handler(&self, data: Arc>>) { + // #[tracing::instrument(skip(self))] + // fn handle_loaded_typegraphs(&self, event_rx: mpsc::UnboundedReceiver) { + // let console = self.console.clone(); + // let loader = self.loader.clone(); + // let fut = async move { + // let mut event_rx = event_rx; + // while let Some(event) = event_rx.recv().await { + // match event { + // LoaderEvent::Typegraph(tg_infos) => { + // let responses = ServerStore::get_responses_or_fail(&tg_infos.path) + // .unwrap_or_log() + // .as_ref() + // .to_owned(); + // for (name, response) in responses.into_iter() { + // match PushResult::new(console.clone(), loader.clone(), response) { + // Ok(push) => { + // if let Err(err) = push.finalize().await { + // panic!("{err:#}"); + // } + // RetryManager::clear_counter(&tg_infos.path); + // } + // Err(err) => { + // let tg_path = tg_infos.path.clone(); + // console.error(format!( + // "failed pushing typegraph {name:?} at {tg_path:?}: {err:#}", + // )); + // if let Some(delay) = RetryManager::next_delay(&tg_path) { + // console.info(format!( + // "retry {}/{}, retrying after {}s of {:?}", + // delay.retry, + // delay.max, + // delay.duration.as_secs(), + // tg_path.display(), + // )); + // tokio::time::sleep(delay.duration).await; + // loader.do_send(LoadModule(Arc::new(tg_path))); + // } + // } + // } + // } + // } + // LoaderEvent::Stopped(b) => { + // if let StopBehavior::ExitFailure(msg) = b { + // panic!("{msg}"); + // } + // } + // } + // } + // trace!("Typegraph channel closed."); + // // pusher address will be dropped when both loops are done + // }; + // Arbiter::current().spawn(fut.in_current_span()); + // } + + // #[tracing::instrument(skip(self))] + // fn handle_watch_events( + // &self, + // watch_event_rx: mpsc::UnboundedReceiver, + // ) { + // let console = self.console.clone(); + // let watcher = self.watcher.clone(); + // let loader = self.loader.clone(); + // let fut = async move { + // let mut watch_event_rx = watch_event_rx; + // while let Some(event) = watch_event_rx.recv().await { + // use actors::watcher::Event as E; + // match event { + // E::ConfigChanged => { + // RetryManager::reset(); + // + // console.warning("metatype configuration file changed".to_string()); + // console.warning("reloading everything".to_string()); + // + // loader.do_send(loader::TryStop(StopBehavior::Restart)); + // watcher.do_send(actors::watcher::Stop); + // } + // E::TypegraphModuleChanged { typegraph_module } => { + // RetryManager::clear_counter(&typegraph_module); + // loader.do_send(ReloadModule( + // typegraph_module.into(), + // ReloadReason::FileChanged, + // )); + // } + // E::TypegraphModuleDeleted { typegraph_module } => { + // RetryManager::clear_counter(&typegraph_module); + // + // // TODO internally by the watcher?? + // watcher.do_send(actors::watcher::RemoveTypegraph( + // typegraph_module.clone(), + // )); + // // TODO delete typegraph in typegate?? 
+ // } + // E::DependencyChanged { + // typegraph_module, + // dependency_path, + // } => { + // RetryManager::clear_counter(&typegraph_module); + // + // loader.do_send(ReloadModule( + // typegraph_module.into(), + // ReloadReason::DependencyChanged(dependency_path), + // )); + // } + // } + // } + // trace!("watcher event channel closed"); + // }; + // Arbiter::current().spawn(fut.in_current_span()); + // } + + fn update_ctrlc_handler(&self, data: Arc>>) { *data.lock().unwrap() = Some(CtrlCHandlerData { watcher: self.watcher.clone(), - loader: self.loader.clone(), + task_manager: self.task_manager.clone(), }); } } diff --git a/meta-cli/src/com/store.rs b/meta-cli/src/com/store.rs index 67e74a7610..0e4a81e687 100644 --- a/meta-cli/src/com/store.rs +++ b/meta-cli/src/com/store.rs @@ -7,6 +7,7 @@ use common::node::BasicAuth; use lazy_static::lazy_static; use serde::{Deserialize, Serialize}; use std::borrow::{Borrow, BorrowMut}; +use std::sync::Mutex; use super::responses::SDKResponse; diff --git a/meta-cli/src/deploy/actors/discovery.rs b/meta-cli/src/deploy/actors/discovery.rs index 88e529a40f..f8e0c1e7ad 100644 --- a/meta-cli/src/deploy/actors/discovery.rs +++ b/meta-cli/src/deploy/actors/discovery.rs @@ -1,32 +1,33 @@ +use crate::deploy::actors::task_manager::{self, TaskReason}; // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 use crate::interlude::*; -use actix::prelude::*; use pathdiff::diff_paths; use crate::{config::Config, typegraph::loader::Discovery}; use super::console::{Console, ConsoleActor}; -use super::loader::{LoadModule, LoaderActor}; +use super::task::action::TaskAction; +use super::task_manager::TaskManager; -pub struct DiscoveryActor { +pub struct DiscoveryActor { config: Arc, - loader: Addr, + task_manager: Addr>, console: Addr, directory: Arc, } -impl DiscoveryActor { +impl DiscoveryActor { pub fn new( config: Arc, - loader: Addr, + task_manager: Addr>, console: Addr, directory: Arc, ) -> Self { Self { config, - loader, + task_manager, console, directory, } @@ -37,7 +38,7 @@ impl DiscoveryActor { #[rtype(result = "()")] struct Stop; -impl Actor for DiscoveryActor { +impl Actor for DiscoveryActor { type Context = Context; #[tracing::instrument(skip(self))] @@ -46,7 +47,7 @@ impl Actor for DiscoveryActor { let config = Arc::clone(&self.config); let dir = self.directory.clone(); - let loader = self.loader.clone(); + let task_manager = self.task_manager.clone(); let console = self.console.clone(); let discovery = ctx.address(); let fut = async move { @@ -58,7 +59,10 @@ impl Actor for DiscoveryActor { "Found typegraph definition module at {}", rel_path.display() )); - loader.do_send(LoadModule(path.into())); + task_manager.do_send(task_manager::message::AddTask { + path: path.into(), + reason: TaskReason::Discovery, + }); } Err(err) => console.error(format!("Error while discovering modules: {}", err)), }) @@ -71,7 +75,7 @@ impl Actor for DiscoveryActor { discovery.do_send(Stop); } .in_current_span(); - Arbiter::current().spawn(fut); + ctx.spawn(fut.into_actor(self)); } fn stopped(&mut self, _ctx: &mut Self::Context) { @@ -79,7 +83,7 @@ impl Actor for DiscoveryActor { } } -impl Handler for DiscoveryActor { +impl Handler for DiscoveryActor { type Result = (); fn handle(&mut self, msg: Stop, ctx: &mut Self::Context) -> Self::Result { diff --git a/meta-cli/src/deploy/actors/loader.rs b/meta-cli/src/deploy/actors/loader.rs index 18486b1b05..6f27d70e74 100644 --- a/meta-cli/src/deploy/actors/loader.rs +++ 
b/meta-cli/src/deploy/actors/loader.rs @@ -253,8 +253,8 @@ impl Handler for LoaderActor { } } -pub fn stopped(addr: Addr) -> oneshot::Receiver { - let (tx, rx) = oneshot::channel(); - addr.do_send(SetStoppedTx(tx)); - rx -} +// pub fn stopped(addr: Addr) -> oneshot::Receiver { +// let (tx, rx) = oneshot::channel(); +// addr.do_send(SetStoppedTx(tx)); +// rx +// } diff --git a/meta-cli/src/deploy/actors/mod.rs b/meta-cli/src/deploy/actors/mod.rs index cab58a18af..5ede9e2414 100644 --- a/meta-cli/src/deploy/actors/mod.rs +++ b/meta-cli/src/deploy/actors/mod.rs @@ -4,4 +4,6 @@ pub mod console; pub mod discovery; pub mod loader; +pub mod task; +pub mod task_manager; pub mod watcher; diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs new file mode 100644 index 0000000000..4154ccee6e --- /dev/null +++ b/meta-cli/src/deploy/actors/task.rs @@ -0,0 +1,401 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +//! The `TaskActor` actor manages a single deploy/serialize task. +//! It starts the task process, processes outputs on stdout and stderr, +//! then reports to `TaskManager`. +//! +//! Note: On the task process +//! - stdout is used for logging and task output; each line is prefix by +//! either one of "debug: ", "info: ", "warn: ", "error: " for logging, +//! or "output: " for JSON-serialized outputs like serialized typegraph +//! or deployment report. +//! - stderr is used for fatal errors that causes the program to exit; mainly +//! unhandled exception in JavaScript or Python +//! +//! TODO: manage the communication between the CLI and the task process in the `TaskActor`. + +pub mod action; +mod command; + +use self::action::TaskAction; +use super::console::{Console, ConsoleActor}; +use super::task_manager::{self, TaskManager}; +use crate::{com::server::get_instance_port, interlude::*}; +use actix::prelude::*; +use common::typegraph::Typegraph; +use process_wrap::tokio::TokioChildWrapper; +use std::time::Duration; +use tokio::io::{AsyncBufReadExt, BufReader, Lines}; +use tokio::process::{ChildStdout, Command}; + +pub mod message { + use super::*; + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct StartProcess(pub Command); + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct ProcessOutput { + pub stdout: ChildStdout, + } + + /// wait for process termination + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct CheckProcessStatus; + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct CollectOutput(pub A::Output); + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct Exit(pub TaskFinishStatus); + + #[derive(Message)] + #[rtype(result = "()")] + pub struct Stop; +} + +use message::*; + +#[derive(Debug)] +pub struct TaskConfig { + base_dir: Arc, + instance_port: u16, +} + +#[derive(Serialize, Deserialize)] +#[serde(untagged)] +enum TaskOutput { + Serialized(Typegraph), + Deployed { deployed: String }, +} + +// TODO cli param +const TIMEOUT_ENV_NAME: &str = "LOADER_TIMEOUT_SECS"; +const DEFAULT_TIMEOUT: u64 = 120; + +impl TaskConfig { + pub fn init(base_dir: Arc) -> Self { + Self { + base_dir, + instance_port: get_instance_port(), + } + } +} + +#[derive(Debug)] +pub enum TaskFinishStatus { + Success(Vec), + Failure, + Cancelled, +} + +pub struct TaskActor { + action: Action, + process: Option>, + task_manager: Addr>, + console: Addr, + collected_output: Vec, + timeout_duration: Duration, +} + +impl TaskActor +where + 
A: TaskAction, +{ + pub fn new(action: A, task_manager: Addr>, console: Addr) -> Self { + Self { + process: None, + task_manager, + console, + action, + collected_output: Default::default(), + // TODO doc? + timeout_duration: Duration::from_secs( + std::env::var(TIMEOUT_ENV_NAME) + .map(|s| { + s.parse::() + .map_err(|_| ()) + .and_then(|n| if n >= 1 { Ok(n) } else { Err(()) }) + .expect(&format!( + "{TIMEOUT_ENV_NAME} env value must be a positive integer" + )) + }) + .unwrap_or(DEFAULT_TIMEOUT), + ), + } + } + + fn get_path(&self) -> &Path { + self.action.get_path() + } + + fn get_path_owned(&self) -> Arc { + self.action.get_path_owned() + } +} + +impl Actor for TaskActor { + type Context = Context; + + fn started(&mut self, ctx: &mut Self::Context) { + let addr = ctx.address(); + let console = self.console.clone(); + let action = self.action.clone(); + + let fut = async move { + match action.get_command().await { + Ok(cmd) => { + addr.do_send(StartProcess(cmd)); + } + Err(e) => { + console.error(e.to_string()); + addr.do_send(Exit(TaskFinishStatus::::Failure)); + } + } + }; + + ctx.spawn(fut.in_current_span().into_actor(self)); + } + + fn stopped(&mut self, _ctx: &mut Self::Context) { + trace!("task actor stopped: {:?}", self.get_path()); + } +} + +impl Handler for TaskActor { + type Result = (); + + fn handle(&mut self, StartProcess(cmd): StartProcess, ctx: &mut Context) -> Self::Result { + use process_wrap::tokio::*; + self.console.info(self.action.get_start_message()); + let spawn_res = TokioCommandWrap::from(cmd) + .wrap(KillOnDrop) + // we use sessions so that kill on drop + // signals will get all grand-children + .wrap(ProcessSession) + .spawn(); + + match spawn_res { + Ok(mut child) => { + let stdout = child.stdout().take(); + let Some(stdout) = stdout else { + self.console.error( + self.action + .get_failure_message("could not read output from process"), + ); + ctx.address().do_send(Exit(TaskFinishStatus::::Failure)); + return; + }; + + ctx.address().do_send(ProcessOutput { stdout }); + + self.process = Some(child); + + let addr = ctx.address(); + let timeout_duration = self.timeout_duration.clone(); + let path = self.get_path_owned(); + let console = self.console.clone(); + let fut = async move { + tokio::time::sleep(timeout_duration).await; + console.error(format!("task timed out for {:?}", path)); + addr.do_send(Stop); + }; + ctx.spawn(fut.in_current_span().into_actor(self)); + } + Err(err) => { + self.console.error(format!( + "failed to start task process for {:?}: {err:#}", + self.get_path() + )); + ctx.address().do_send(Exit(TaskFinishStatus::::Failure)); + } + } + } +} + +impl Handler for TaskActor { + type Result = (); + + fn handle( + &mut self, + ProcessOutput { stdout }: ProcessOutput, + ctx: &mut Context, + ) -> Self::Result { + let addr = ctx.address(); + let console = self.console.clone(); + let path = self.get_path_owned(); + + let fut = async move { + let reader = BufReader::new(stdout).lines(); + if let Err(e) = Self::loop_output_lines(reader, addr.clone(), console.clone()).await { + console.error(format!( + "failed to read process output on {:?}: {e:#}", + path + )); + addr.do_send(Exit(TaskFinishStatus::::Failure)) + } else { + // end of stdout + addr.do_send(CheckProcessStatus); + } + }; + ctx.spawn(fut.in_current_span().into_actor(self)); + } +} + +impl Handler for TaskActor { + type Result = (); + + fn handle(&mut self, _msg: CheckProcessStatus, ctx: &mut Context) -> Self::Result { + let Some(process) = self.process.take() else { + self.console + 
.error(format!("task process not found for {:?}", self.get_path())); + ctx.address().do_send(Exit(TaskFinishStatus::::Failure)); + return (); + }; + + let addr = ctx.address(); + let console = self.console.clone(); + let path = self.get_path_owned(); + let action = self.action.clone(); + + let fut = async move { + match Box::into_pin(process.wait_with_output()).await { + Ok(output) => { + if output.status.success() { + // logging in Exit handler + addr.do_send(Exit(TaskFinishStatus::::Success(Default::default()))); + } else { + console.error(action.get_failure_message(&format!( + "process failed with code {:?}", + output.status.code() + ))); + console.error(format!( + "(stderr):\n{}", + std::str::from_utf8(&output.stderr) + .context("invalid utf8 in task output (stderr)") + .unwrap_or_log() + )); + addr.do_send(Exit(TaskFinishStatus::::Failure)); + } + } + Err(e) => { + console.error( + action + .get_failure_message(&format!("could not read process status: {e:#}")), + ); + addr.do_send(Exit(TaskFinishStatus::::Failure)); + } + } + }; + + ctx.spawn(fut.in_current_span().into_actor(self)); + } +} + +#[derive(Clone, Copy)] +enum OutputLevel { + Debug, + Info, + Warning, + Error, +} + +impl TaskActor { + async fn loop_output_lines( + mut reader: Lines>, + addr: Addr>, + console: Addr, + ) -> tokio::io::Result<()> { + let mut latest_level = OutputLevel::Info; + while let Some(line) = reader.next_line().await? { + if let Some(debug) = line.strip_prefix("debug: ") { + console.debug(debug.to_string()); + latest_level = OutputLevel::Debug; + continue; + } + + if let Some(info) = line.strip_prefix("info: ") { + console.info(info.to_string()); + latest_level = OutputLevel::Info; + continue; + } + + if let Some(warn) = line.strip_prefix("warning: ") { + console.warning(warn.to_string()); + latest_level = OutputLevel::Warning; + continue; + } + + if let Some(error) = line.strip_prefix("error:") { + console.error(error.to_string()); + latest_level = OutputLevel::Error; + continue; + } + + if let Some(data) = line.strip_prefix("output: ") { + let output: A::Output = serde_json::from_str(data)?; + addr.do_send(CollectOutput(output)); + continue; + } + + match latest_level { + OutputLevel::Debug => { + console.debug(format!("> {}", line)); + } + OutputLevel::Info => { + console.info(format!("> {}", line)); + } + OutputLevel::Warning => { + console.warning(format!("> {}", line)); + } + OutputLevel::Error => { + console.error(format!("> {}", line)); + } + } + } + Ok(()) + } +} + +impl Handler> for TaskActor { + type Result = (); + + fn handle(&mut self, message: CollectOutput, ctx: &mut Context) -> Self::Result { + self.collected_output.push(message.0); + } +} + +impl Handler> for TaskActor { + type Result = (); + + fn handle(&mut self, mut message: Exit, ctx: &mut Context) -> Self::Result { + if let TaskFinishStatus::::Success(res) = &mut message.0 { + self.console.info(self.action.get_success_message(res)); + std::mem::swap(res, &mut self.collected_output); + } + self.task_manager + .do_send(task_manager::message::UpdateTaskStatus::Finished { + path: self.get_path_owned(), + status: message.0, + }); + ctx.stop(); + } +} + +impl Handler for TaskActor { + type Result = (); + + fn handle(&mut self, _msg: Stop, ctx: &mut Context) -> Self::Result { + let path = self.get_path_owned(); + if let Some(process) = &mut self.process { + self.console.warning(format!("killing task for {:?}", path)); + process.start_kill().unwrap(); + } + } +} diff --git a/meta-cli/src/deploy/actors/task/action.rs 
b/meta-cli/src/deploy/actors/task/action.rs new file mode 100644 index 0000000000..50ae05d2ce --- /dev/null +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -0,0 +1,205 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use super::{command::CommandBuilder, TaskConfig}; +use crate::interlude::*; +use common::typegraph::Typegraph; +use owo_colors::OwoColorize; +use serde::Deserialize; +use std::{path::Path, sync::Arc}; +use tokio::{process::Command, sync::OwnedSemaphorePermit}; + +pub trait TaskActionGenerator: Clone { + type Action: TaskAction; + + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action; +} + +pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { + type Output: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send; + type Generator: TaskActionGenerator + Unpin; + + async fn get_command(&self) -> Result; + fn get_path(&self) -> &Path; + fn get_path_owned(&self) -> Arc; + + fn get_start_message(&self) -> String; + fn get_success_message(&self, res: &[Self::Output]) -> String; + fn get_failure_message(&self, err: &str) -> String; +} + +pub type SerializeAction = Arc; + +#[derive(Debug)] +pub struct SerializeActionInner { + path: Arc, + task_config: Arc, + #[allow(unused)] + permit: OwnedSemaphorePermit, +} + +#[derive(Clone)] +pub struct SerializeActionGenerator { + task_config: Arc, +} + +impl SerializeActionGenerator { + pub fn new(task_config: TaskConfig) -> Self { + Self { + task_config: Arc::new(task_config), + } + } +} + +impl TaskActionGenerator for SerializeActionGenerator { + type Action = SerializeAction; + + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + SerializeActionInner { + path, + task_config: self.task_config.clone(), + permit, + } + .into() + } +} + +impl TaskAction for SerializeAction { + type Output = Typegraph; + type Generator = SerializeActionGenerator; + + async fn get_command(&self) -> Result { + CommandBuilder { + path: self.path.clone(), + task_config: self.task_config.clone(), + action_env: "serialize", + } + .build() + .await + } + + fn get_path(&self) -> &Path { + return &self.path; + } + + fn get_path_owned(&self) -> Arc { + return self.path.clone(); + } + + fn get_start_message(&self) -> String { + format!("starting serialization process for {:?}", self.path) + } + + fn get_success_message(&self, res: &[Self::Output]) -> String { + let names = res + .iter() + .map(|tg| format!("{}", tg.name().unwrap_or_log().yellow())) + .collect::>() + .join(", "); + format!( + "{icon} successful serialization from {path:?}: {names}", + icon = "✓".green(), + path = self.path, + ) + } + + fn get_failure_message(&self, err: &str) -> String { + format!( + "{icon} failed to serialize {path:?}: {err}", + icon = "✗".red(), + path = self.path, + err = err + ) + } +} + +pub type DeployAction = Arc; + +#[derive(Debug)] +pub struct DeployActionInner { + path: Arc, + task_config: Arc, + #[allow(unused)] + permit: OwnedSemaphorePermit, +} + +#[derive(Clone)] +pub struct DeployActionGenerator { + task_config: Arc, +} + +impl DeployActionGenerator { + pub fn new(task_config: TaskConfig) -> Self { + Self { + task_config: Arc::new(task_config), + } + } +} + +impl TaskActionGenerator for DeployActionGenerator { + type Action = DeployAction; + + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + DeployActionInner { + path, + task_config: self.task_config.clone(), + permit, + } + .into() + } +} + 
+#[derive(Deserialize, Debug)] +pub struct DeployOutput { + deployed: String, +} + +impl TaskAction for DeployAction { + type Output = DeployOutput; + type Generator = DeployActionGenerator; + + async fn get_command(&self) -> Result { + CommandBuilder { + path: self.path.clone(), + task_config: self.task_config.clone(), + action_env: "deploy", + } + .build() + .await + } + + fn get_path(&self) -> &Path { + return &self.path; + } + + fn get_path_owned(&self) -> Arc { + return self.path.clone(); + } + + fn get_start_message(&self) -> String { + format!("starting deployment process for {:?}", self.path) + } + + fn get_success_message(&self, res: &[Self::Output]) -> String { + let deployed = res + .iter() + .map(|output| format!("{}", output.deployed.yellow())) + .collect::>() + .join(", "); + format!( + "{icon} successful deployment from {path:?}: {deployed}", + icon = "✓".green(), + path = self.path, + deployed = deployed + ) + } + + fn get_failure_message(&self, err: &str) -> String { + format!( + "{icon} failed to deploy {path:?}: {err}", + icon = "✗".red(), + path = self.path, + err = err + ) + } +} diff --git a/meta-cli/src/deploy/actors/task/command.rs b/meta-cli/src/deploy/actors/task/command.rs new file mode 100644 index 0000000000..04b856eda2 --- /dev/null +++ b/meta-cli/src/deploy/actors/task/command.rs @@ -0,0 +1,50 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use super::TaskConfig; +use crate::interlude::*; +use crate::{config::ModuleType, typegraph::loader::get_task_command}; +use std::process::Stdio; +use std::{path::Path, sync::Arc}; +use tokio::process::Command; + +pub(super) struct CommandBuilder { + pub path: Arc, + pub task_config: Arc, + pub action_env: &'static str, +} + +impl CommandBuilder { + pub(super) async fn build(&self) -> Result { + if !tokio::fs::try_exists(&self.path) + .await + .map_err(|e| eyre::eyre!("typegraph file {:?} does not exist: {:#}", self.path, e))? + { + return Err(eyre::eyre!( + "typegraph file {:?} does not exist", + &self.path + )); + } + + let path: &Path = &self.path; + // TODO move into this file + let mut command = get_task_command( + ModuleType::try_from(path).unwrap_or_log(), + path, + &self.task_config.base_dir, + ) + .await + .map_err(|e| eyre::eyre!("failed to get task command: {:#}", e))?; + command + .env("MCLI_TG_PATH", path.display().to_string()) + .env( + "MCLI_SERVER_PORT", + self.task_config.instance_port.to_string(), + ) + .env("MCLI_TASK_ACTION", self.action_env) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + Ok(command) + } +} diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs new file mode 100644 index 0000000000..53a2cb1278 --- /dev/null +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -0,0 +1,311 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+// SPDX-License-Identifier: MPL-2.0 + +use std::collections::HashSet; + +use futures::channel::oneshot; +use indexmap::IndexMap; +use tokio::sync::{OwnedSemaphorePermit, Semaphore}; + +use crate::interlude::*; + +use super::{ + console::{Console, ConsoleActor}, + task::{ + self, + action::{TaskAction, TaskActionGenerator}, + TaskActor, TaskFinishStatus, + }, +}; + +pub mod message { + use super::*; + + #[derive(Message)] + #[rtype(result = "()")] + pub struct AddTask { + pub path: Arc, + pub reason: TaskReason, + } + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct StartTask { + pub path: Arc, + pub permit: OwnedSemaphorePermit, + } + + #[derive(Message)] + #[rtype(result = "()")] + pub enum UpdateTaskStatus { + Started { + path: Arc, + addr: Addr>, + }, + Finished { + path: Arc, + // for report + status: TaskFinishStatus, + }, + } + + /// manual stop (by CTRL-C handler) + #[derive(Message)] + #[rtype(result = "()")] + pub struct Stop; + + #[derive(Message)] + #[rtype(result = "()")] + pub struct ForceStop; + + #[derive(Message)] + #[rtype(result = "()")] + pub struct Restart; +} + +use message::*; + +pub enum StopSchedule { + Manual, + Automatic, +} + +enum Status { + Default, + Stopping, // waiting for active tasks to finish; cancel pending tasks +} + +#[derive(Clone, Debug)] +pub enum StopReason { + Natural, + Restart, + Manual, + ManualForced, + Error, +} + +#[derive(Debug)] +pub struct ReportEntry { + pub path: Arc, + pub status: TaskFinishStatus, +} + +#[derive(Debug)] +pub struct Report { + pub stop_reason: StopReason, + pub entries: Vec>, +} + +pub struct TaskManager { + action_generator: A::Generator, + active_tasks: HashMap, Addr>>, + pending_tasks: HashSet>, + permits: Arc, + report_tx: Option>>, + stop_reason: Option, + reports: IndexMap, TaskFinishStatus>, + console: Addr, +} + +impl TaskManager { + pub fn new( + action_generator: A::Generator, + max_parallel_tasks: usize, + report_tx: oneshot::Sender>, + console: Addr, + ) -> Self { + Self { + action_generator, + active_tasks: Default::default(), + pending_tasks: Default::default(), + permits: Semaphore::new(max_parallel_tasks).into(), + report_tx: Some(report_tx), + stop_reason: None, + reports: Default::default(), + console, + } + } + + pub fn auto_stop(mut self) -> Self { + self.stop_reason = Some(StopReason::Natural); + self + } +} + +#[derive(Debug)] +pub enum TaskReason { + User, // single file specified with the -f option + Discovery, + FileChanged, + // FileCreated, + DependencyChanged(PathBuf), + Retry(usize), +} + +impl Actor for TaskManager { + type Context = Context; + + fn started(&mut self, _ctx: &mut Self::Context) { + // this cannot mess with the interactive deployment + self.console.debug("started task manager".to_string()); + } + + fn stopped(&mut self, _ctx: &mut Self::Context) { + trace!("TaskManager stopped"); + // send report + let report = Report { + stop_reason: self + .stop_reason + .take() + .ok_or_else(|| eyre::eyre!("missing stop reason in task manager")) + .unwrap_or_log(), + entries: std::mem::take(&mut self.reports) + .into_iter() + .map(|(path, status)| ReportEntry { path, status }) + .collect(), + }; + + debug!("sending report: {:?}", report); + self.report_tx.take().unwrap().send(report).unwrap_or_log(); + } +} + +impl Handler for TaskManager { + type Result = (); + + fn handle(&mut self, msg: AddTask, ctx: &mut Context) -> Self::Result { + match &msg.reason { + TaskReason::User => {} + TaskReason::Discovery => { + self.console.info(format!( + "discovered typegraph 
definition module {:?}", + msg.path + )); + } + TaskReason::FileChanged => { + self.console + .info(format!("file changed {:?}, reloading", msg.path)); + } + TaskReason::DependencyChanged(dep) => { + self.console.info(format!( + "dependency changed {:?}, reloading {:?}", + dep, msg.path + )); + } + TaskReason::Retry(_) => { + // TODO retry no? + self.console.info(format!("retrying {:?}", msg.path)); + } + } + + self.pending_tasks.insert(msg.path.clone()); + + let path = msg.path.clone(); + let permits = self.permits.clone(); + let addr = ctx.address(); + + let fut = async move { + let permit = permits.acquire_owned().await.unwrap_or_log(); + addr.do_send(StartTask { path, permit }); + }; + + ctx.spawn(fut.in_current_span().into_actor(self)); + } +} + +impl Handler for TaskManager { + type Result = (); + + fn handle(&mut self, message: StartTask, ctx: &mut Context) -> Self::Result { + if let Some(stop_reason) = &self.stop_reason { + match stop_reason { + StopReason::Natural => {} + _ => { + self.console + .warning(format!("task cancelled for {:?}", message.path)); + return; + } + } + } + let action = self + .action_generator + .generate(message.path.clone(), message.permit); + let path = action.get_path_owned(); + let task_addr = TaskActor::new(action, ctx.address(), self.console.clone()).start(); + self.pending_tasks.remove(&path); + self.active_tasks.insert(path.clone(), task_addr); + } +} + +impl Handler> for TaskManager { + type Result = (); + + fn handle(&mut self, message: UpdateTaskStatus, ctx: &mut Context) -> Self::Result { + match message { + UpdateTaskStatus::Started { + path: typegraph_path, + addr, + } => { + // TODO remove + } + UpdateTaskStatus::Finished { + path: typegraph_path, + status: _, // for report + } => { + self.active_tasks.remove(&typegraph_path); + if self.active_tasks.is_empty() { + match self.stop_reason { + Some(StopReason::Natural | StopReason::Manual) => { + self.console.info("all tasks finished".to_string()); + ctx.stop(); + } + _ => {} + } + } + } + } + } +} + +impl Handler for TaskManager { + type Result = (); + + fn handle(&mut self, _msg: Stop, ctx: &mut Context) -> Self::Result { + match self.stop_reason.clone() { + Some(reason) => match reason { + StopReason::Natural | StopReason::Restart => { + self.stop_reason = Some(StopReason::Manual); + } + StopReason::Manual => { + self.stop_reason = Some(StopReason::ManualForced); + ctx.address().do_send(ForceStop); + } + StopReason::ManualForced | StopReason::Error => {} + }, + None => { + self.stop_reason = Some(StopReason::Manual); + } + } + } +} + +impl Handler for TaskManager { + type Result = (); + + fn handle(&mut self, _msg: ForceStop, ctx: &mut Context) -> Self::Result { + self.console + .warning("force stopping active tasks".to_string()); + for (_, addr) in self.active_tasks.iter() { + addr.do_send(task::message::Stop) + } + } +} + +impl Handler for TaskManager { + type Result = (); + + fn handle(&mut self, _msg: Restart, ctx: &mut Context) -> Self::Result { + self.stop_reason = Some(StopReason::Restart); + ctx.address().do_send(ForceStop); + } +} diff --git a/meta-cli/src/deploy/actors/watcher.rs b/meta-cli/src/deploy/actors/watcher.rs index 0fc5b1749f..d79516c4da 100644 --- a/meta-cli/src/deploy/actors/watcher.rs +++ b/meta-cli/src/deploy/actors/watcher.rs @@ -1,3 +1,4 @@ +use crate::deploy::push::pusher::RetryManager; // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 use crate::interlude::*; @@ -13,11 +14,35 @@ use std::{sync::Arc, time::Duration}; use tokio::sync::mpsc; use super::console::Console; +use super::task::action::DeployAction; +use super::task_manager::{self, TaskManager, TaskReason}; use crate::config::Config; use crate::deploy::actors::console::ConsoleActor; use crate::typegraph::dependency_graph::DependencyGraph; use crate::typegraph::loader::discovery::FileFilter; +pub mod message { + use super::*; + + #[derive(Message)] + #[rtype(result = "()")] + pub struct Stop; + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct File(pub PathBuf); + + #[derive(Message)] + #[rtype(result = "()")] + pub struct UpdateDependencies(pub Arc); + + #[derive(Message)] + #[rtype(result = "()")] + pub struct RemoveTypegraph(pub PathBuf); +} + +use message::*; + #[derive(Debug)] pub enum Event { DependencyChanged { @@ -34,31 +59,16 @@ pub enum Event { } pub struct WatcherActor { + // TODO config path only config: Arc, directory: Arc, - event_tx: mpsc::UnboundedSender, + task_manager: Addr>, console: Addr, debouncer: Option>, dependency_graph: DependencyGraph, file_filter: FileFilter, } -#[derive(Message)] -#[rtype(result = "()")] -pub struct Stop; - -#[derive(Message)] -#[rtype(result = "()")] -struct File(PathBuf); - -#[derive(Message)] -#[rtype(result = "()")] -pub struct UpdateDependencies(pub Arc); - -#[derive(Message)] -#[rtype(result = "()")] -pub struct RemoveTypegraph(pub PathBuf); - impl Actor for WatcherActor { type Context = Context; @@ -81,14 +91,14 @@ impl WatcherActor { pub fn new( config: Arc, directory: Arc, - event_tx: mpsc::UnboundedSender, + task_manager: Addr>, console: Addr, ) -> Result { let file_filter = FileFilter::new(&config)?; Ok(Self { config, directory, - event_tx, + task_manager, console, debouncer: None, dependency_graph: DependencyGraph::default(), @@ -135,10 +145,15 @@ impl Handler for WatcherActor { impl Handler for WatcherActor { type Result = (); - fn handle(&mut self, msg: File, _ctx: &mut Self::Context) -> Self::Result { + fn handle(&mut self, msg: File, ctx: &mut Self::Context) -> Self::Result { let path = msg.0; if &path == self.config.path.as_ref().unwrap() { - self.event_tx.send(Event::ConfigChanged).unwrap(); + self.console + .warning("metatype configuration filie changed".to_owned()); + self.console + .warning("reloading all the typegraphs".to_owned()); + self.task_manager.do_send(task_manager::message::Restart); + ctx.stop(); } else { let reverse_deps = self.dependency_graph.get_rdeps(&path); if !reverse_deps.is_empty() { @@ -151,13 +166,11 @@ impl Handler for WatcherActor { self.console .info(format!(" -> {rel_path}", rel_path = rel_path.display())); - if let Err(e) = self.event_tx.send(Event::DependencyChanged { - typegraph_module: path, - dependency_path, - }) { - self.console.error(format!("Failed to send event: {}", e)); - // panic?? 
- } + RetryManager::clear_counter(&path); + self.task_manager.do_send(task_manager::message::AddTask { + path: path.into(), + reason: TaskReason::DependencyChanged(dependency_path), + }); } } else if path.try_exists().unwrap() { let mut searcher = SearcherBuilder::new() @@ -167,18 +180,20 @@ impl Handler for WatcherActor { if !self.file_filter.is_excluded(&path, &mut searcher) { let rel_path = diff_paths(&path, &self.directory).unwrap(); self.console.info(format!("File modified: {rel_path:?}")); - if let Err(e) = self.event_tx.send(Event::TypegraphModuleChanged { - typegraph_module: path, - }) { - self.console.error(format!("Failed to send event: {}", e)); - // panic?? - } + + RetryManager::clear_counter(&path); + self.task_manager.do_send(task_manager::message::AddTask { + path: path.into(), + reason: TaskReason::FileChanged, + }); } - } else if let Err(e) = self.event_tx.send(Event::TypegraphModuleDeleted { - typegraph_module: path, - }) { - self.console.error(format!("Failed to send event: {}", e)); - // panic?? + } else { + RetryManager::clear_counter(&path); + // TODO method call + ctx.address().do_send(RemoveTypegraph(path.clone())); + + // TODO delete typegraph in typegate + // TODO cancel any eventual active deployment task } } } diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index ae65fed009..eb8850a5b2 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -2,6 +2,7 @@ // SPDX-License-Identifier: MPL-2.0 use crate::interlude::*; +use std::sync::Mutex; use std::time::Duration; use actix::prelude::*; diff --git a/meta-cli/src/main.rs b/meta-cli/src/main.rs index c32bb197c7..fd2ffae8b7 100644 --- a/meta-cli/src/main.rs +++ b/meta-cli/src/main.rs @@ -6,7 +6,7 @@ mod interlude { pub use std::{ collections::HashMap, path::{Path, PathBuf}, - sync::{Arc, Mutex}, + sync::Arc, }; pub use color_eyre::{ @@ -27,6 +27,7 @@ mod interlude { pub use async_trait::async_trait; pub use crate::{anyhow_to_eyre, map_ferr}; + pub use actix::prelude::*; } mod cli; @@ -83,7 +84,8 @@ fn main() -> Result<()> { }; if args.verbose.is_present() { - std::env::set_var("RUST_LOG", args.verbose.log_level_filter().to_string()); + let filter = args.verbose.log_level_filter().to_string(); + std::env::set_var("RUST_LOG", format!("warn,meta={filter}")); } logger::init(); diff --git a/meta-cli/src/typegraph/loader/mod.rs b/meta-cli/src/typegraph/loader/mod.rs index 5e184635a2..9a8b2a7d4f 100644 --- a/meta-cli/src/typegraph/loader/mod.rs +++ b/meta-cli/src/typegraph/loader/mod.rs @@ -1,7 +1,8 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 use crate::interlude::*; -use eyre::Error; +use actix::Arbiter; +use eyre::{eyre, Error}; pub mod discovery; @@ -117,6 +118,14 @@ impl<'a> Loader<'a> { error: err.into(), })?; + // let stderr = child.stderr().ok_or_else(|| LoaderError::LoaderProcess { + // path: path.clone(), + // error: eyre!("could not get stderr from loader process"), + // })?; + // Arbiter::current().spawn(async move { + // // + // }); + let duration = get_loader_timeout_duration().map_err(|err| LoaderError::Other { error: err })?; match timeout(duration, Box::into_pin(child.wait())).await { @@ -130,41 +139,11 @@ impl<'a> Loader<'a> { error: e.into(), // generic })?; if exit.success() { - #[cfg(debug_assertions)] - { - if let Some(stderr) = child.stderr().take().as_mut() { - // TODO console actor - let mut buff = String::new(); - stderr.read_to_string(&mut buff).await.map_err(|e| { - LoaderError::LoaderProcess { - path: path.clone(), - error: e.into(), - } - })?; - if !buff.is_empty() { - info!("loader stderr: {buff}"); - } - } - } Ok(TypegraphInfos { path: path.as_ref().to_owned(), base_path: self.base_dir.clone(), }) } else { - let stderr = match child.stderr().take().as_mut() { - Some(value) => { - let mut buff = String::new(); - value.read_to_string(&mut buff).await.map_err(|e| { - LoaderError::LoaderProcess { - path: path.clone(), - error: e.into(), - } - })?; - buff.to_owned() - } - None => "".to_string(), - }; - let stdout = match child.stdout().take().as_mut() { Some(value) => { let mut buff = String::new(); @@ -184,7 +163,6 @@ impl<'a> Loader<'a> { path: path.clone(), error: ferr!("loader process err") .section(stdout.trim().to_string().header("Stdout:")) - .section(stderr.trim().to_string().header("Stderr:")) .suppress_backtrace(true), }) } @@ -274,6 +252,87 @@ impl<'a> Loader<'a> { } } +#[tracing::instrument(err)] +pub async fn get_task_command( + module_type: ModuleType, + path: &Path, + base_dir: &Path, +) -> Result { + if let Ok(argv_str) = std::env::var("MCLI_LOADER_CMD") { + let argv = argv_str.split(' ').collect::>(); + let mut command = Command::new(argv[0]); + command + .args(&argv[1..]) + .arg(path.to_str().unwrap()) + .arg(base_dir); + return Ok(command); + } + + match module_type { + ModuleType::Python => { + ensure_venv(path).map_err(|e| LoaderError::PythonVenvNotFound { + path: path.to_owned().into(), + error: e, + })?; + let loader_py = + std::env::var("MCLI_LOADER_PY").unwrap_or_else(|_| "python3".to_string()); + let mut loader_py = loader_py.split_whitespace(); + let mut command = Command::new(loader_py.next().unwrap()); + command + .args(loader_py) + .arg(path.to_str().unwrap()) + .current_dir(base_dir) + .env("PYTHONUNBUFFERED", "1") + .env("PYTHONDONTWRITEBYTECODE", "1") + .env("PY_TG_COMPATIBILITY", "1"); + Ok(command) + } + ModuleType::Deno => { + // TODO cache result? + match detect_deno_loader_cmd(path) + .await + .map_err(|error| LoaderError::Unknown { + path: path.to_path_buf().into(), + error, + })? 
{ + TsLoaderRt::Deno => { + log::debug!("loading typegraph using deno"); + let mut command = Command::new("deno"); + command + .arg("run") + // .arg("--unstable") + .arg("--allow-all") + .arg("--check") + .arg(path.to_str().unwrap()) + .current_dir(base_dir); + Ok(command) + } + TsLoaderRt::Node => { + log::debug!("loading typegraph using npm x tsx, make sure npm packages have been installed"); + let mut command = Command::new("npm"); + command + .arg("x") + .arg("--yes") + .arg("tsx") + .current_dir(path.parent().unwrap()) + .arg(path.to_str().unwrap()); + Ok(command) + } + TsLoaderRt::Bun => { + log::debug!("loading typegraph using bun x tsx, make sure npm packages have been installed"); + let mut command = Command::new("bun"); + command + .arg("x") + .arg("tsx") + .arg(path.to_str().unwrap()) + .current_dir(path.parent().unwrap()); + Ok(command) + } + } + } + } +} + enum TsLoaderRt { Deno, Node, diff --git a/typegate/src/services/artifact_service.ts b/typegate/src/services/artifact_service.ts index b0df233182..fff1133ef6 100644 --- a/typegate/src/services/artifact_service.ts +++ b/typegate/src/services/artifact_service.ts @@ -119,6 +119,7 @@ export class ArtifactService { const hash = await this.store.persistence.save(stream); if (hash !== meta.hash) { await this.store.persistence.delete(hash); + logger.warn("hash mismatch: {} {}", hash, meta.hash); return new Response(JSON.stringify({ error: "hash mismatch" }), { status: 403, headers: { "Content-Type": "application/json" }, From be00039ee59d734934fc783bea8e6160856187fc Mon Sep 17 00:00:00 2001 From: Natoandro Date: Wed, 29 May 2024 16:00:55 +0300 Subject: [PATCH 03/35] (doc) update actor model diagram --- meta-cli/src/deploy/actor-model.drawio.svg | 443 ++++++++++++++++++++- 1 file changed, 439 insertions(+), 4 deletions(-) diff --git a/meta-cli/src/deploy/actor-model.drawio.svg b/meta-cli/src/deploy/actor-model.drawio.svg index e12f32df85..11e7877ab4 100644 --- a/meta-cli/src/deploy/actor-model.drawio.svg +++ b/meta-cli/src/deploy/actor-model.drawio.svg @@ -1,4 +1,439 @@ - - - -
[Elided: text labels extracted from the previous actor-model.drawio.svg diagram (removed by this patch). It showed Console, Discovery, Loader, Pusher and Watcher actors plus a Dispatcher, a push lifecycle manager, a Ctrl-C handler, the Typegate and the Filesystem; messages Load, Reload, UpdateDependencies and Push; loader events (Typegraph, Stopped), push events (Success, TransportFailure, InvalidResponse, Error, InteractionNeeded) and watcher events (TypegraphModified, DependencyModified, TypegraphDeleted, ConfigModified); setStatus/retry/interact, "Unpack migrations", "Interact (TODO)", "Receives messages from all other actors", and notes "deploy command only" / "Watch-mode only (deploy command)".]
[Elided: markup of the updated actor-model.drawio.svg diagram added by this patch; the SVG source is not recoverable from the extracted text. Its labels show Console, Discovery, TaskManager, Watcher and Task actors plus the Typegate, the Ctrl-C handler and the Filesystem. AddTask/Restart messages flow from Discovery and the Watcher to the TaskManager, the Ctrl-C handler sends Stop, and the TaskManager starts Task actors and can send them Stop. Each Task actor starts a JS/Python process (action: deploy or serialize), passes settings via environment variables, reads logging and status output from stdout, and sends ScheduleRetry/Finished messages back to the TaskManager; "Unpack migrations" sits next to the Filesystem. The Console actor receives messages from all other actors; the Watcher parts are marked "Watch-mode only (deploy command)".]
\ No newline at end of file From f5ef1c3e91ef3ae4316a3d91e64ac1878016afef Mon Sep 17 00:00:00 2001 From: Natoandro Date: Thu, 30 May 2024 10:55:59 +0300 Subject: [PATCH 04/35] (typegraph/sdk) implement logging system --- meta-cli/src/cli/deploy.rs | 48 ++- meta-cli/src/deploy/actors/task.rs | 64 ++-- meta-cli/src/deploy/actors/task/action.rs | 338 +++++++++++------- meta-cli/src/deploy/actors/task_manager.rs | 30 +- .../src/deploy/actors/task_manager/report.rs | 55 +++ typegraph/node/sdk/src/log.ts | 46 +++ typegraph/node/sdk/src/tg_artifact_upload.ts | 29 +- typegraph/node/sdk/src/tg_deploy.ts | 21 +- typegraph/node/sdk/src/tg_manage.ts | 76 ++-- typegraph/node/sdk/src/typegraph.ts | 3 + typegraph/node/sdk/src/utils/func_utils.ts | 24 ++ 11 files changed, 490 insertions(+), 244 deletions(-) create mode 100644 meta-cli/src/deploy/actors/task_manager/report.rs create mode 100644 typegraph/node/sdk/src/log.ts diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 11ab154555..5ada7ae8bc 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -188,29 +188,43 @@ impl Action for DeploySubcommand { } } - if deploy.options.watch { + let status = if deploy.options.watch { info!("running in watch mode"); // watch the content of a folder if self.file.is_some() { bail!("Cannot use --file in watch mode"); } watch_mode::enter_watch_mode(deploy).await?; + + ExitStatus::Failure } else { trace!("running in default mode"); // deploy a single file let deploy = default_mode::DefaultMode::init(deploy).await?; - deploy.run().await?; + let status = deploy.run().await?; server_handle.unwrap().stop(true).await; - } - Ok(()) + status + }; + + match status { + ExitStatus::Success => Ok(()), + ExitStatus::Failure => Err(eyre::eyre!("failed")), + } } } +enum ExitStatus { + Success, + Failure, +} + mod default_mode { //! 
non-watch mode + use crate::cli::deploy::default_mode::actors::task::TaskFinishStatus; + use super::*; pub struct DefaultMode { @@ -254,7 +268,7 @@ mod default_mode { }) } - pub async fn run(self) -> Result<()> { + pub async fn run(self) -> Result { debug!(file = ?self.deploy.file); { @@ -288,19 +302,31 @@ mod default_mode { // let stopped = loader::stopped(loader); // self.handle_loaded_typegraphs().await??; let report = self.report_rx.await?; - - // TODO display report entries + let summary = report.summary(); + self.console.info(format!("Result:\n{}", summary.text)); match report.stop_reason { - StopReason::Natural => Ok(()), + StopReason::Natural => { + if summary.success { + Ok(ExitStatus::Success) + } else { + Ok(ExitStatus::Failure) + } + } StopReason::Restart => { unreachable!("TaskManager should not restart on the default mode") } - StopReason::Manual => Err(eyre::eyre!("tasks manually stopped")), - StopReason::ManualForced => Err(eyre::eyre!("tasks manually stopped (forced)")), + StopReason::Manual => { + if summary.success { + Ok(ExitStatus::Success) + } else { + Ok(ExitStatus::Failure) + } + } // TODO read report + StopReason::ManualForced => Ok(ExitStatus::Failure), StopReason::Error => { // error should have already been reported - Err(eyre::eyre!("failed")) + Ok(ExitStatus::Failure) } } } diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 4154ccee6e..6ad5b39c13 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -18,7 +18,7 @@ pub mod action; mod command; -use self::action::TaskAction; +use self::action::{ActionResult, TaskAction}; use super::console::{Console, ConsoleActor}; use super::task_manager::{self, TaskManager}; use crate::{com::server::get_instance_port, interlude::*}; @@ -49,7 +49,7 @@ pub mod message { #[derive(Message)] #[rtype(result = "()")] - pub(super) struct CollectOutput(pub A::Output); + pub(super) struct CollectOutput(pub ActionResult
); #[derive(Message)] #[rtype(result = "()")] @@ -90,17 +90,17 @@ impl TaskConfig { #[derive(Debug)] pub enum TaskFinishStatus { - Success(Vec), - Failure, Cancelled, + Error, + Finished(Vec>), } -pub struct TaskActor { - action: Action, +pub struct TaskActor { + action: A, process: Option>, - task_manager: Addr>, + task_manager: Addr>, console: Addr, - collected_output: Vec, + collected_output: Vec>, timeout_duration: Duration, } @@ -155,7 +155,7 @@ impl Actor for TaskActor { } Err(e) => { console.error(e.to_string()); - addr.do_send(Exit(TaskFinishStatus::::Failure)); + addr.do_send(Exit(TaskFinishStatus::::Error)); } } }; @@ -187,9 +187,9 @@ impl Handler for TaskActor { let Some(stdout) = stdout else { self.console.error( self.action - .get_failure_message("could not read output from process"), + .get_error_message("could not read output from process"), ); - ctx.address().do_send(Exit(TaskFinishStatus::::Failure)); + ctx.address().do_send(Exit(TaskFinishStatus::::Error)); return; }; @@ -213,7 +213,7 @@ impl Handler for TaskActor { "failed to start task process for {:?}: {err:#}", self.get_path() )); - ctx.address().do_send(Exit(TaskFinishStatus::::Failure)); + ctx.address().do_send(Exit(TaskFinishStatus::::Error)); } } } @@ -238,7 +238,7 @@ impl Handler for TaskActor { "failed to read process output on {:?}: {e:#}", path )); - addr.do_send(Exit(TaskFinishStatus::::Failure)) + addr.do_send(Exit(TaskFinishStatus::::Error)) } else { // end of stdout addr.do_send(CheckProcessStatus); @@ -255,7 +255,7 @@ impl Handler for TaskActor { let Some(process) = self.process.take() else { self.console .error(format!("task process not found for {:?}", self.get_path())); - ctx.address().do_send(Exit(TaskFinishStatus::::Failure)); + ctx.address().do_send(Exit(TaskFinishStatus::::Error)); return (); }; @@ -269,9 +269,9 @@ impl Handler for TaskActor { Ok(output) => { if output.status.success() { // logging in Exit handler - addr.do_send(Exit(TaskFinishStatus::::Success(Default::default()))); + addr.do_send(Exit(TaskFinishStatus::::Finished(Default::default()))); } else { - console.error(action.get_failure_message(&format!( + console.error(action.get_error_message(&format!( "process failed with code {:?}", output.status.code() ))); @@ -281,15 +281,14 @@ impl Handler for TaskActor { .context("invalid utf8 in task output (stderr)") .unwrap_or_log() )); - addr.do_send(Exit(TaskFinishStatus::::Failure)); + addr.do_send(Exit(TaskFinishStatus::::Error)); } } Err(e) => { console.error( - action - .get_failure_message(&format!("could not read process status: {e:#}")), + action.get_error_message(&format!("could not read process status: {e:#}")), ); - addr.do_send(Exit(TaskFinishStatus::::Failure)); + addr.do_send(Exit(TaskFinishStatus::::Error)); } } }; @@ -332,15 +331,21 @@ impl TaskActor { continue; } - if let Some(error) = line.strip_prefix("error:") { + if let Some(error) = line.strip_prefix("error: ") { console.error(error.to_string()); latest_level = OutputLevel::Error; continue; } - if let Some(data) = line.strip_prefix("output: ") { - let output: A::Output = serde_json::from_str(data)?; - addr.do_send(CollectOutput(output)); + if let Some(data_json) = line.strip_prefix("success: ") { + let data: A::SuccessData = serde_json::from_str(data_json)?; + addr.do_send(CollectOutput(Ok(data))); + continue; + } + + if let Some(data_json) = line.strip_prefix("failure: ") { + let data: A::FailureData = serde_json::from_str(data_json)?; + addr.do_send(CollectOutput(Err(data))); continue; } @@ -367,6 +372,14 @@ impl 
Handler> for TaskActor { type Result = (); fn handle(&mut self, message: CollectOutput, ctx: &mut Context) -> Self::Result { + match &message.0 { + Ok(data) => { + self.console.info(self.action.get_success_message(&data)); + } + Err(data) => { + self.console.error(self.action.get_failure_message(&data)); + } + } self.collected_output.push(message.0); } } @@ -375,8 +388,7 @@ impl Handler> for TaskActor { type Result = (); fn handle(&mut self, mut message: Exit, ctx: &mut Context) -> Self::Result { - if let TaskFinishStatus::::Success(res) = &mut message.0 { - self.console.info(self.action.get_success_message(res)); + if let TaskFinishStatus::::Finished(res) = &mut message.0 { std::mem::swap(res, &mut self.collected_output); } self.task_manager diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index 50ae05d2ce..be8e5d9d84 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -15,8 +15,13 @@ pub trait TaskActionGenerator: Clone { fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action; } +pub trait OutputData: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send { + fn get_typegraph_name(&self) -> String; +} + pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { - type Output: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send; + type SuccessData: OutputData; + type FailureData: OutputData; type Generator: TaskActionGenerator + Unpin; async fn get_command(&self) -> Result; @@ -24,182 +29,253 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { fn get_path_owned(&self) -> Arc; fn get_start_message(&self) -> String; - fn get_success_message(&self, res: &[Self::Output]) -> String; - fn get_failure_message(&self, err: &str) -> String; + fn get_success_message(&self, output: &Self::SuccessData) -> String; + fn get_failure_message(&self, output: &Self::FailureData) -> String; + fn get_error_message(&self, err: &str) -> String; } -pub type SerializeAction = Arc; +pub type ActionResult = Result; -#[derive(Debug)] -pub struct SerializeActionInner { - path: Arc, - task_config: Arc, - #[allow(unused)] - permit: OwnedSemaphorePermit, +pub fn get_typegraph_name(res: &ActionResult) -> String { + match res { + Ok(success) => success.get_typegraph_name(), + Err(failure) => failure.get_typegraph_name(), + } } -#[derive(Clone)] -pub struct SerializeActionGenerator { - task_config: Arc, -} +pub use deploy::*; +pub use serialize::*; -impl SerializeActionGenerator { - pub fn new(task_config: TaskConfig) -> Self { - Self { - task_config: Arc::new(task_config), - } +mod serialize { + use super::*; + + pub type SerializeAction = Arc; + + #[derive(Debug)] + pub struct SerializeActionInner { + path: Arc, + task_config: Arc, + #[allow(unused)] + permit: OwnedSemaphorePermit, } -} -impl TaskActionGenerator for SerializeActionGenerator { - type Action = SerializeAction; + #[derive(Clone)] + pub struct SerializeActionGenerator { + task_config: Arc, + } - fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { - SerializeActionInner { - path, - task_config: self.task_config.clone(), - permit, + impl SerializeActionGenerator { + pub fn new(task_config: TaskConfig) -> Self { + Self { + task_config: Arc::new(task_config), + } } - .into() } -} -impl TaskAction for SerializeAction { - type Output = Typegraph; - type Generator = SerializeActionGenerator; + impl TaskActionGenerator for SerializeActionGenerator { + type Action = SerializeAction; - 
async fn get_command(&self) -> Result { - CommandBuilder { - path: self.path.clone(), - task_config: self.task_config.clone(), - action_env: "serialize", + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + SerializeActionInner { + path, + task_config: self.task_config.clone(), + permit, + } + .into() } - .build() - .await } - fn get_path(&self) -> &Path { - return &self.path; + #[derive(Deserialize, Debug)] + pub struct SerializeError { + typegraph: String, + error: String, } - fn get_path_owned(&self) -> Arc { - return self.path.clone(); + impl OutputData for Typegraph { + fn get_typegraph_name(&self) -> String { + self.name().unwrap() + } } - fn get_start_message(&self) -> String { - format!("starting serialization process for {:?}", self.path) + impl OutputData for SerializeError { + fn get_typegraph_name(&self) -> String { + self.typegraph.clone() + } } - fn get_success_message(&self, res: &[Self::Output]) -> String { - let names = res - .iter() - .map(|tg| format!("{}", tg.name().unwrap_or_log().yellow())) - .collect::>() - .join(", "); - format!( - "{icon} successful serialization from {path:?}: {names}", - icon = "✓".green(), - path = self.path, - ) - } + impl TaskAction for SerializeAction { + type SuccessData = Typegraph; + type FailureData = SerializeError; + type Generator = SerializeActionGenerator; + + async fn get_command(&self) -> Result { + CommandBuilder { + path: self.path.clone(), + task_config: self.task_config.clone(), + action_env: "serialize", + } + .build() + .await + } - fn get_failure_message(&self, err: &str) -> String { - format!( - "{icon} failed to serialize {path:?}: {err}", - icon = "✗".red(), - path = self.path, - err = err - ) - } -} + fn get_path(&self) -> &Path { + return &self.path; + } -pub type DeployAction = Arc; + fn get_path_owned(&self) -> Arc { + return self.path.clone(); + } -#[derive(Debug)] -pub struct DeployActionInner { - path: Arc, - task_config: Arc, - #[allow(unused)] - permit: OwnedSemaphorePermit, -} + fn get_start_message(&self) -> String { + format!("starting serialization process for {:?}", self.path) + } -#[derive(Clone)] -pub struct DeployActionGenerator { - task_config: Arc, -} + fn get_success_message(&self, output: &Self::SuccessData) -> String { + format!( + "{icon} successfully serialized typegraph {name} from {path:?}", + icon = "✓".green(), + name = output.get_typegraph_name().cyan(), + path = self.path, + ) + } + + fn get_failure_message(&self, output: &Self::FailureData) -> String { + format!( + "{icon} failed to serialize typegraph {name} from {path:?}: {err}", + icon = "✗".red(), + name = output.get_typegraph_name().cyan(), + path = self.path, + err = output.error, + ) + } -impl DeployActionGenerator { - pub fn new(task_config: TaskConfig) -> Self { - Self { - task_config: Arc::new(task_config), + fn get_error_message(&self, err: &str) -> String { + format!( + "{icon} failed to serialize typegraph(s) from {path:?}: {err}", + icon = "✗".red(), + path = self.path, + err = err, + ) } } } -impl TaskActionGenerator for DeployActionGenerator { - type Action = DeployAction; +mod deploy { + use super::*; + pub type DeployAction = Arc; - fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { - DeployActionInner { - path, - task_config: self.task_config.clone(), - permit, - } - .into() + #[derive(Debug)] + pub struct DeployActionInner { + path: Arc, + task_config: Arc, + #[allow(unused)] + permit: OwnedSemaphorePermit, } -} -#[derive(Deserialize, Debug)] -pub struct DeployOutput 
{ - deployed: String, -} + #[derive(Clone)] + pub struct DeployActionGenerator { + task_config: Arc, + } + + impl DeployActionGenerator { + pub fn new(task_config: TaskConfig) -> Self { + Self { + task_config: Arc::new(task_config), + } + } + } -impl TaskAction for DeployAction { - type Output = DeployOutput; - type Generator = DeployActionGenerator; + impl TaskActionGenerator for DeployActionGenerator { + type Action = DeployAction; - async fn get_command(&self) -> Result { - CommandBuilder { - path: self.path.clone(), - task_config: self.task_config.clone(), - action_env: "deploy", + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + DeployActionInner { + path, + task_config: self.task_config.clone(), + permit, + } + .into() } - .build() - .await } - fn get_path(&self) -> &Path { - return &self.path; + #[derive(Deserialize, Debug)] + pub struct DeploySuccess { + typegraph: String, } - fn get_path_owned(&self) -> Arc { - return self.path.clone(); + #[derive(Deserialize, Debug)] + pub struct DeployError { + typegraph: String, + error: String, + #[serde(default)] + follow_up: Option, // todo migration } - fn get_start_message(&self) -> String { - format!("starting deployment process for {:?}", self.path) + impl OutputData for DeploySuccess { + fn get_typegraph_name(&self) -> String { + self.typegraph.clone() + } } - fn get_success_message(&self, res: &[Self::Output]) -> String { - let deployed = res - .iter() - .map(|output| format!("{}", output.deployed.yellow())) - .collect::>() - .join(", "); - format!( - "{icon} successful deployment from {path:?}: {deployed}", - icon = "✓".green(), - path = self.path, - deployed = deployed - ) + impl OutputData for DeployError { + fn get_typegraph_name(&self) -> String { + self.typegraph.clone() + } } - fn get_failure_message(&self, err: &str) -> String { - format!( - "{icon} failed to deploy {path:?}: {err}", - icon = "✗".red(), - path = self.path, - err = err - ) + impl TaskAction for DeployAction { + type SuccessData = DeploySuccess; + type FailureData = DeployError; + type Generator = DeployActionGenerator; + + async fn get_command(&self) -> Result { + CommandBuilder { + path: self.path.clone(), + task_config: self.task_config.clone(), + action_env: "deploy", + } + .build() + .await + } + + fn get_path(&self) -> &Path { + return &self.path; + } + + fn get_path_owned(&self) -> Arc { + return self.path.clone(); + } + + fn get_start_message(&self) -> String { + format!("starting deployment process for {:?}", self.path) + } + + fn get_success_message(&self, output: &Self::SuccessData) -> String { + format!( + "{icon} successfully deployed typegraph {name} from {path:?}", + icon = "✓".green(), + name = output.get_typegraph_name().cyan(), + path = self.path, + ) + } + + fn get_failure_message(&self, output: &Self::FailureData) -> String { + format!( + "{icon} failed to deploy typegraph {name} from {path:?}: {err}", + icon = "✗".red(), + name = output.get_typegraph_name().cyan(), + path = self.path, + err = output.error, + ) + } + + fn get_error_message(&self, err: &str) -> String { + format!( + "{icon} failed to deploy typegraph(s) from {path:?}: {err}", + icon = "✗".red(), + path = self.path, + err = err, + ) + } } } diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 53a2cb1278..129e45afd7 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -18,6 +18,9 @@ use super::{ }, }; +pub mod report; +pub use report::Report; 
+ pub mod message { use super::*; @@ -84,18 +87,6 @@ pub enum StopReason { Error, } -#[derive(Debug)] -pub struct ReportEntry { - pub path: Arc, - pub status: TaskFinishStatus, -} - -#[derive(Debug)] -pub struct Report { - pub stop_reason: StopReason, - pub entries: Vec>, -} - pub struct TaskManager { action_generator: A::Generator, active_tasks: HashMap, Addr>>, @@ -161,7 +152,7 @@ impl Actor for TaskManager { .unwrap_or_log(), entries: std::mem::take(&mut self.reports) .into_iter() - .map(|(path, status)| ReportEntry { path, status }) + .map(|(path, status)| report::ReportEntry { path, status }) .collect(), }; @@ -242,21 +233,20 @@ impl Handler> for TaskManager { fn handle(&mut self, message: UpdateTaskStatus, ctx: &mut Context) -> Self::Result { match message { - UpdateTaskStatus::Started { - path: typegraph_path, - addr, - } => { - // TODO remove + UpdateTaskStatus::Started { .. } => { + // TODO remove - unused } UpdateTaskStatus::Finished { path: typegraph_path, - status: _, // for report + status, } => { self.active_tasks.remove(&typegraph_path); + self.reports.insert(typegraph_path.clone(), status); if self.active_tasks.is_empty() { match self.stop_reason { Some(StopReason::Natural | StopReason::Manual) => { - self.console.info("all tasks finished".to_string()); + self.console.debug("all tasks finished".to_string()); + ctx.stop(); } _ => {} diff --git a/meta-cli/src/deploy/actors/task_manager/report.rs b/meta-cli/src/deploy/actors/task_manager/report.rs new file mode 100644 index 0000000000..62d3bee6fd --- /dev/null +++ b/meta-cli/src/deploy/actors/task_manager/report.rs @@ -0,0 +1,55 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use super::StopReason; +use crate::deploy::actors::task::{action::TaskAction, TaskFinishStatus}; +use color_eyre::owo_colors::OwoColorize; +use std::{path::Path, sync::Arc}; + +#[derive(Debug)] +pub struct ReportEntry { + pub path: Arc, + pub status: TaskFinishStatus, +} + +#[derive(Debug)] +pub struct Report { + pub stop_reason: StopReason, + pub entries: Vec>, +} + +#[derive(Default, Debug)] +pub struct ReportSummary { + pub text: String, + pub success: bool, +} + +impl Report { + pub fn summary(&self) -> ReportSummary { + self.entries + .iter() + .fold(Default::default(), |mut summary, entry| { + let (text, success) = match &entry.status { + TaskFinishStatus::::Finished(results) => { + let success_count = results.iter().filter(|res| res.is_ok()).count(); + ( + format!( + " - {}: {}/{} success\n", + entry.path.display().to_string().yellow(), + success_count, + results.len() + ), + success_count == results.len(), + ) + } + TaskFinishStatus::::Error => (" - failed\n".to_string(), false), + TaskFinishStatus::::Cancelled => (" - cancelled\n".to_string(), true), + }; + summary.text.push_str(&text); + ReportSummary { + text: summary.text, + success: summary.success && success, + } + }) + } +} diff --git a/typegraph/node/sdk/src/log.ts b/typegraph/node/sdk/src/log.ts new file mode 100644 index 0000000000..c88a1bb8b9 --- /dev/null +++ b/typegraph/node/sdk/src/log.ts @@ -0,0 +1,46 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+// SPDX-License-Identifier: MPL-2.0 + +import { inspect } from "node:util"; + +/** + * see: module level documentation `meta-cli/src/deploy/actors/task.rs` + */ + +function getOutput(args: any[]) { + return args.map((arg) => { + if (typeof arg === "string") return arg; + return inspect(arg, { + colors: process.stdout.isTTY, + depth: 10, + maxStringLength: 1000, + maxArrayLength: 20, + }); + }).join(" "); +} + +export const log = { + debug(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`debug: ${output}\n`); + }, + info(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`info: ${output}\n`); + }, + warn(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`warning: ${output}\n`); + }, + error(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`error: ${output}\n`); + }, + + failure(data: any) { + process.stdout.write(`failure: ${JSON.stringify(data)}\n`); + }, + success(data: any) { + process.stdout.write(`success: ${JSON.stringify(data)}\n`); + }, +}; diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index 7a9da66c29..ba31c046cd 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -5,6 +5,8 @@ import { BasicAuth } from "./tg_deploy.js"; import { Artifact } from "./gen/interfaces/metatype-typegraph-core.js"; import { dirname, join } from "node:path"; import * as fsp from "node:fs/promises"; +import { log } from "./log.js"; +import { execRequest } from "./utils/func_utils.js"; interface UploadArtifactMeta { typegraphName: string; @@ -32,11 +34,12 @@ export class ArtifactUploader { artifactMetas: UploadArtifactMeta[], ): Promise> { const artifactsJson = JSON.stringify(artifactMetas); - const response = await fetch(this.getUploadUrl, { + const response = await execRequest(this.getUploadUrl, { method: "POST", headers: this.headers, body: artifactsJson, - }); + }, `tgDeploy failed to get upload urls`); + log.debug("response"); if (!response.ok) { const err = await response.text(); @@ -72,21 +75,32 @@ export class ArtifactUploader { return; } + const urlObj = new URL(this.getUploadUrl); + const altUrlObj = new URL(url); + urlObj.pathname = altUrlObj.pathname; + urlObj.search = altUrlObj.search; + const path = join(dirname(this.tgPath), meta.relativePath); // TODO: stream const content = await fsp.readFile(path); - const res = await fetch(url, { + log.info("uploading artifact", meta.relativePath, urlObj.href); + const res = await execRequest(urlObj, { method: "POST", headers: uploadHeaders, body: new Uint8Array(content), - } as RequestInit); + } as RequestInit, `failed to upload artifact ${meta.relativePath}`); if (!res.ok) { - const err = await res.text(); + const err = await res.json(); + // To be read by the CLI? 
+ log.error("Failed to upload artifact", meta.relativePath, err); + console.log(err); throw new Error( - `Failed to upload artifact '${path}' (${res.status}): ${err}`, + `Failed to upload artifact '${path}' (${res.status}): ${err.error}`, ); } - return res.json(); + const ret = res.json(); + log.info(`Successfully uploaded artifact`, meta.relativePath); + return ret; } private getMetas(artifacts: Artifact[]): UploadArtifactMeta[] { @@ -130,6 +144,7 @@ export class ArtifactUploader { const artifactMetas = this.getMetas(this.refArtifacts); const uploadUrls = await this.fetchUploadUrls(artifactMetas); + log.debug("upload urls:", uploadUrls); const results = await Promise.allSettled( uploadUrls.map( async (url, i) => { diff --git a/typegraph/node/sdk/src/tg_deploy.ts b/typegraph/node/sdk/src/tg_deploy.ts index 9b15fe7d55..9b6534e4a3 100644 --- a/typegraph/node/sdk/src/tg_deploy.ts +++ b/typegraph/node/sdk/src/tg_deploy.ts @@ -5,6 +5,7 @@ import { ArtifactResolutionConfig } from "./gen/interfaces/metatype-typegraph-co import { ArtifactUploader } from "./tg_artifact_upload.js"; import { TypegraphOutput } from "./typegraph.js"; import { wit_utils } from "./wit.js"; +import { execRequest } from "./utils/func_utils.js"; export class BasicAuth { constructor(public username: string, public password: string) { @@ -79,7 +80,7 @@ export async function tgDeploy( tg: tgJson, secrets: Object.entries(secrets ?? {}), }), - }); + }, `tgDeploy failed to deploy typegraph ${typegraph.name}`); return { serialized: tgJson, @@ -103,23 +104,7 @@ export async function tgRemove( method: "POST", headers, body: wit_utils.gqlRemoveQuery([typegraph.name]), - }); + }, `tgRemove failed to remove typegraph ${typegraph.name}`); return { typegate: response }; } - -/** - * Simple fetch wrapper with more verbose errors - */ -async function execRequest(url: URL, reqInit: RequestInit) { - try { - const response = await fetch(url, reqInit); - if (response.headers.get("Content-Type") == "application/json") { - return await response.json(); - } - throw Error(`expected json object, got "${await response.text()}"`); - } catch (err) { - const message = err instanceof Error ? err.message : err; - throw Error(`error executing request to ${url.toString()}: ${message}`); - } -} diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index 4ffc18536b..1fcc4ea506 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -6,9 +6,10 @@ import { BasicAuth, tgDeploy } from "./tg_deploy.js"; import { TgFinalizationResult, TypegraphOutput } from "./typegraph.js"; import { getEnvVariable } from "./utils/func_utils.js"; import { freezeTgOutput } from "./utils/func_utils.js"; +import { log } from "./log.js"; -const PORT = "META_CLI_SERVER_PORT"; // meta-cli instance that executes the current file -const SELF_PATH = "META_CLI_TG_PATH"; // path to the current file to uniquely identify the run results +const PORT = "MCLI_SERVER_PORT"; // meta-cli instance that executes the current file +const SELF_PATH = "MCLI_TG_PATH"; // path to the current file to uniquely identify the run results type Command = "serialize" | "deploy" | "codegen"; @@ -113,14 +114,19 @@ export class Manager { prefix: config.prefix, }); } catch (err: any) { - return await this.#relayErrorToCLI( - "serialize", - "serialization_err", - err?.message ?? "error serializing typegraph", - { - err, - }, - ); + log.failure({ + typegraph: this.#typegraph.name, + error: err?.message ?? 
"failed to serialize typegraph", + }); + return; + // return await this.#relayErrorToCLI( + // "serialize", + // "serialization_err", + // err?.message ?? "error serializing typegraph", + // { + // err, + // }, + // ); } await this.#relayResultToCLI( "serialize", @@ -150,14 +156,19 @@ export class Manager { try { frozenSerialized = frozenOut.serialize(config); } catch (err: any) { - return await this.#relayErrorToCLI( - "deploy", - "serialization_err", - err?.message ?? "error serializing typegraph", - { - err, - }, - ); + log.failure({ + typegraph: this.#typegraph.name, + error: err?.message ?? "failed to serialize typegraph", + }); + return; + // return await this.#relayErrorToCLI( + // "deploy", + // "serialization_err", + // err?.message ?? "error serializing typegraph", + // { + // err, + // }, + // ); } const reusableTgOutput = { ...this.#typegraph, @@ -182,20 +193,23 @@ export class Manager { }); deployRes = typegate; } catch (err: any) { - return await this.#relayErrorToCLI( - "deploy", - "deploy_err", - err?.message ?? "error deploying typegraph to typegate", - { - err, - ...(err.cause ? { cause: err.cause } : {}), - }, - ); + log.failure({ + typegraph: this.#typegraph.name, + error: err?.message ?? "failed to deploy typegraph", + }); + return; + // return await this.#relayErrorToCLI( + // "deploy", + // "deploy_err", + // err?.message ?? "error deploying typegraph to typegate", + // { + // err, + // ...(err.cause ? { cause: err.cause } : {}), + // }, + // ); } - await this.#relayResultToCLI( - "deploy", - deployRes, - ); + log.debug("deploy result", { deployRes }); + log.success({ typegraph: this.#typegraph.name }); } async #relayResultToCLI(initiator: Command, data: T) { diff --git a/typegraph/node/sdk/src/typegraph.ts b/typegraph/node/sdk/src/typegraph.ts index dcdf131cb4..bcffdbb2d9 100644 --- a/typegraph/node/sdk/src/typegraph.ts +++ b/typegraph/node/sdk/src/typegraph.ts @@ -17,6 +17,7 @@ import { ArtifactResolutionConfig, } from "./gen/interfaces/metatype-typegraph-core.js"; import { Manager } from "./tg_manage.js"; +import { log } from "./log.js"; type Exports = Record; @@ -219,7 +220,9 @@ export async function typegraph( ...InjectionSource, }; + log.debug("builder"); builder(g); + log.debug("builder: ok"); const ret = { serialize(config: ArtifactResolutionConfig) { diff --git a/typegraph/node/sdk/src/utils/func_utils.ts b/typegraph/node/sdk/src/utils/func_utils.ts index 0dcd5c5bad..72e91ce5af 100644 --- a/typegraph/node/sdk/src/utils/func_utils.ts +++ b/typegraph/node/sdk/src/utils/func_utils.ts @@ -9,6 +9,7 @@ import { import { ReducePath } from "../gen/interfaces/metatype-typegraph-utils.js"; import { serializeStaticInjection } from "./injection_utils.js"; import { ArtifactResolutionConfig } from "../gen/interfaces/metatype-typegraph-core.js"; +import { log } from "../log.js"; export function stringifySymbol(symbol: symbol) { const name = symbol.toString().match(/\((.+)\)/)?.[1]; @@ -99,3 +100,26 @@ export function freezeTgOutput( serialize: () => frozenMemo[tgOutput.name], }; } + +/** + * Simple fetch wrapper with more verbose errors + */ +export async function execRequest( + url: URL, + reqInit: RequestInit, + errMsg: string, +) { + try { + const response = await fetch(url, reqInit); + if (response.headers.get("Content-Type") == "application/json") { + return await response.json(); + } + throw Error( + `${errMsg}: expected json object, got "${await response.text()}"`, + ); + } catch (err) { + log.debug("fetch error", { url, requestInit: reqInit, error: err }); + const 
message = err instanceof Error ? err.message : err; + throw Error(`${errMsg}: ${message}`); + } +} From af6066b72b28175b04678cfb68f237216e7f0038 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Thu, 30 May 2024 16:07:52 +0300 Subject: [PATCH 05/35] fix summary, TS sdk fetch --- meta-cli/src/cli/deploy.rs | 4 +- meta-cli/src/deploy/actors/discovery.rs | 3 +- .../src/deploy/actors/task_manager/report.rs | 40 ++++++++++--------- typegraph/node/sdk/src/tg_artifact_upload.ts | 22 +++++----- typegraph/node/sdk/src/utils/func_utils.ts | 6 +++ 5 files changed, 42 insertions(+), 33 deletions(-) diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 5ada7ae8bc..fb6b6e7922 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -299,11 +299,9 @@ mod default_mode { ) }; - // let stopped = loader::stopped(loader); - // self.handle_loaded_typegraphs().await??; let report = self.report_rx.await?; let summary = report.summary(); - self.console.info(format!("Result:\n{}", summary.text)); + println!("Result:\n{}", summary.text); match report.stop_reason { StopReason::Natural => { diff --git a/meta-cli/src/deploy/actors/discovery.rs b/meta-cli/src/deploy/actors/discovery.rs index f8e0c1e7ad..a125b5a371 100644 --- a/meta-cli/src/deploy/actors/discovery.rs +++ b/meta-cli/src/deploy/actors/discovery.rs @@ -1,6 +1,7 @@ -use crate::deploy::actors::task_manager::{self, TaskReason}; // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 + +use crate::deploy::actors::task_manager::{self, TaskReason}; use crate::interlude::*; use pathdiff::diff_paths; diff --git a/meta-cli/src/deploy/actors/task_manager/report.rs b/meta-cli/src/deploy/actors/task_manager/report.rs index 62d3bee6fd..bbc5e79826 100644 --- a/meta-cli/src/deploy/actors/task_manager/report.rs +++ b/meta-cli/src/deploy/actors/task_manager/report.rs @@ -18,7 +18,7 @@ pub struct Report { pub entries: Vec>, } -#[derive(Default, Debug)] +#[derive(Debug)] pub struct ReportSummary { pub text: String, pub success: bool, @@ -26,30 +26,34 @@ pub struct ReportSummary { impl Report { pub fn summary(&self) -> ReportSummary { - self.entries - .iter() - .fold(Default::default(), |mut summary, entry| { + self.entries.iter().fold( + ReportSummary { + text: String::new(), + success: true, + }, + |mut summary, entry| { let (text, success) = match &entry.status { TaskFinishStatus::::Finished(results) => { let success_count = results.iter().filter(|res| res.is_ok()).count(); ( - format!( - " - {}: {}/{} success\n", - entry.path.display().to_string().yellow(), - success_count, - results.len() - ), + format!("{}/{} success", success_count, results.len()), success_count == results.len(), ) } - TaskFinishStatus::::Error => (" - failed\n".to_string(), false), - TaskFinishStatus::::Cancelled => (" - cancelled\n".to_string(), true), + TaskFinishStatus::::Error => ("failed".to_string(), false), + TaskFinishStatus::::Cancelled => ("cancelled".to_string(), true), }; - summary.text.push_str(&text); - ReportSummary { - text: summary.text, - success: summary.success && success, - } - }) + summary.text.push_str( + format!( + " - {}: {}\n", + entry.path.display().to_string().yellow(), + text + ) + .as_str(), + ); + summary.success = summary.success && success; + summary + }, + ) } } diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index ba31c046cd..22c5ee7fd7 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ 
b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -34,21 +34,21 @@ export class ArtifactUploader { artifactMetas: UploadArtifactMeta[], ): Promise> { const artifactsJson = JSON.stringify(artifactMetas); - const response = await execRequest(this.getUploadUrl, { + const uploadUrls: Array = await execRequest(this.getUploadUrl, { method: "POST", headers: this.headers, body: artifactsJson, }, `tgDeploy failed to get upload urls`); - log.debug("response"); - if (!response.ok) { - const err = await response.text(); - throw new Error( - `Failed requesting artifact upload URLs: ${response.status} - ${err}`, - ); - } + // if (!response.ok) { + // log.debug("response", response); + // const err = await response.text(); + // throw new Error( + // `Failed requesting artifact upload URLs: ${response.status} - ${err}`, + // ); + // } - const uploadUrls: Array = await response.json(); + // const uploadUrls: Array = await response.json(); if (uploadUrls.length !== artifactMetas.length) { const diff = `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; @@ -71,7 +71,7 @@ export class ArtifactUploader { } if (url == null) { - // console.error(`Skipping upload for artifact: ${meta.relativePath}`); + log.info("skipping artifact upload:", meta.relativePath); return; } @@ -144,7 +144,7 @@ export class ArtifactUploader { const artifactMetas = this.getMetas(this.refArtifacts); const uploadUrls = await this.fetchUploadUrls(artifactMetas); - log.debug("upload urls:", uploadUrls); + log.debug("upload urls", uploadUrls); const results = await Promise.allSettled( uploadUrls.map( async (url, i) => { diff --git a/typegraph/node/sdk/src/utils/func_utils.ts b/typegraph/node/sdk/src/utils/func_utils.ts index 72e91ce5af..aa4b3e545d 100644 --- a/typegraph/node/sdk/src/utils/func_utils.ts +++ b/typegraph/node/sdk/src/utils/func_utils.ts @@ -111,9 +111,15 @@ export async function execRequest( ) { try { const response = await fetch(url, reqInit); + if (!response.ok) { + log.debug("error", response.json()); + throw Error(`${errMsg}: request failed with status ${response.status} (${response.statusText})`) + } + if (response.headers.get("Content-Type") == "application/json") { return await response.json(); } + log.debug("response", response); throw Error( `${errMsg}: expected json object, got "${await response.text()}"`, ); From 4c3abccec5dfce69410a0503b523ac3de7dbc0eb Mon Sep 17 00:00:00 2001 From: Natoandro Date: Thu, 30 May 2024 21:19:32 +0300 Subject: [PATCH 06/35] use relative typegraph paths for display --- meta-cli/src/cli/deploy.rs | 17 +++++++++-------- meta-cli/src/deploy/actors/discovery.rs | 6 +----- meta-cli/src/deploy/actors/task/action.rs | 4 ++-- meta-cli/src/deploy/actors/task/command.rs | 2 +- meta-cli/src/deploy/actors/task_manager.rs | 7 ++++++- 5 files changed, 19 insertions(+), 17 deletions(-) diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index fb6b6e7922..943b838373 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -108,7 +108,7 @@ pub struct Deploy { impl Deploy { #[tracing::instrument(level = "debug")] pub async fn new(deploy: &DeploySubcommand, args: &ConfigArgs) -> Result { - let dir = args.dir(); + let dir: Arc = args.dir().into(); let config_path = args.config.clone(); let config = Arc::new(Config::load_or_find(config_path, &dir)?); @@ -138,12 +138,13 @@ impl Deploy { ServerStore::set_prefix(node_config.prefix); ServerStore::set_codegen_flag(deploy.options.codegen); - let file = deploy - .file - .as_ref() - .map(|f| f.normalize()) - 
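For context on the `execRequest` helper changed above: it wraps `fetch`, rejects non-OK responses with the caller-supplied error prefix, and only resolves once the typegate answers with a JSON body, which is why the artifact uploader can assign the parsed result directly. A minimal usage sketch follows; the endpoint, payload shape, and element type are illustrative assumptions, not taken from this patch.

import { execRequest } from "./utils/func_utils.js";

// Illustrative values only; the real URL and body are built by ArtifactUploader.
const url = new URL("http://localhost:7890/example/get-upload-urls");

const uploadUrls: Array<string | null> = await execRequest(
  url,
  {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify([{ relativePath: "scripts/hello.ts" }]),
  },
  "failed to get upload urls", // prefix prepended to any thrown error message
);

// A null entry means the corresponding artifact does not need to be uploaded;
// the uploader skips it (see the `url == null` branch above).
console.log(uploadUrls);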
.transpose()? - .map(|f| f.into_path_buf()); + let file = deploy.file.clone(); + // let file = deploy + // .file + // .as_ref() + // .map(|f| f.normalize()) + // .transpose()? + // .map(|f| f.into_path_buf()); if let Some(file) = &file { if let Err(err) = crate::config::ModuleType::try_from(file.as_path()) { bail!("file is not a valid module type: {err:#}") @@ -151,7 +152,7 @@ impl Deploy { } Ok(Self { config, - base_dir: dir.into(), + base_dir: dir.clone(), options, secrets, file: file.map(|path| path.into()), diff --git a/meta-cli/src/deploy/actors/discovery.rs b/meta-cli/src/deploy/actors/discovery.rs index a125b5a371..fc801c9254 100644 --- a/meta-cli/src/deploy/actors/discovery.rs +++ b/meta-cli/src/deploy/actors/discovery.rs @@ -56,12 +56,8 @@ impl Actor for DiscoveryActor { .start(|path| match path { Ok(path) => { let rel_path = diff_paths(&path, &dir).unwrap(); - console.debug(format!( - "Found typegraph definition module at {}", - rel_path.display() - )); task_manager.do_send(task_manager::message::AddTask { - path: path.into(), + path: rel_path.into(), reason: TaskReason::Discovery, }); } diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index be8e5d9d84..1fa486e23d 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -110,7 +110,7 @@ mod serialize { async fn get_command(&self) -> Result { CommandBuilder { - path: self.path.clone(), + path: self.task_config.base_dir.to_path_buf().join(&self.path), task_config: self.task_config.clone(), action_env: "serialize", } @@ -230,7 +230,7 @@ mod deploy { async fn get_command(&self) -> Result { CommandBuilder { - path: self.path.clone(), + path: self.task_config.base_dir.to_path_buf().join(&self.path), task_config: self.task_config.clone(), action_env: "deploy", } diff --git a/meta-cli/src/deploy/actors/task/command.rs b/meta-cli/src/deploy/actors/task/command.rs index 04b856eda2..d9ec0e4f0d 100644 --- a/meta-cli/src/deploy/actors/task/command.rs +++ b/meta-cli/src/deploy/actors/task/command.rs @@ -9,7 +9,7 @@ use std::{path::Path, sync::Arc}; use tokio::process::Command; pub(super) struct CommandBuilder { - pub path: Arc, + pub path: PathBuf, pub task_config: Arc, pub action_env: &'static str, } diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 129e45afd7..5ce0676fc4 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -270,7 +270,12 @@ impl Handler for TaskManager { self.stop_reason = Some(StopReason::ManualForced); ctx.address().do_send(ForceStop); } - StopReason::ManualForced | StopReason::Error => {} + StopReason::ManualForced => { + self.console + .warning("stopping the task manager".to_string()); + ctx.stop(); + } + StopReason::Error => {} }, None => { self.stop_reason = Some(StopReason::Manual); From 1e8b9ce4f8deebf3698af32b2da1ca1c57eaa51e Mon Sep 17 00:00:00 2001 From: Natoandro Date: Thu, 30 May 2024 21:20:40 +0300 Subject: [PATCH 07/35] (python sdk) add logging --- examples/metatype.yaml | 3 ++ typegraph/node/sdk/src/typegraph.ts | 15 ++++----- .../typegraph/graph/tg_artifact_upload.py | 33 ++++++++++++------- typegraph/python/typegraph/graph/tg_manage.py | 29 +++++++++------- typegraph/python/typegraph/log.py | 30 +++++++++++++++++ 5 files changed, 78 insertions(+), 32 deletions(-) create mode 100644 typegraph/python/typegraph/log.py diff --git a/examples/metatype.yaml b/examples/metatype.yaml index 
ac0a186b0a..3a61cda757 100644 --- a/examples/metatype.yaml +++ b/examples/metatype.yaml @@ -141,6 +141,9 @@ typegraphs: materializers: prisma: migrations_path: "migrations" + python: + exclude: + - "**/*" metagen: targets: diff --git a/typegraph/node/sdk/src/typegraph.ts b/typegraph/node/sdk/src/typegraph.ts index bcffdbb2d9..682ab08371 100644 --- a/typegraph/node/sdk/src/typegraph.ts +++ b/typegraph/node/sdk/src/typegraph.ts @@ -17,7 +17,6 @@ import { ArtifactResolutionConfig, } from "./gen/interfaces/metatype-typegraph-core.js"; import { Manager } from "./tg_manage.js"; -import { log } from "./log.js"; type Exports = Record; @@ -35,23 +34,23 @@ interface TypegraphArgs { } export class ApplyFromArg { - constructor(public name: string | null, public type: number | null) {} + constructor(public name: string | null, public type: number | null) { } } export class ApplyFromStatic { - constructor(public value: any) {} + constructor(public value: any) { } } export class ApplyFromSecret { - constructor(public key: string) {} + constructor(public key: string) { } } export class ApplyFromContext { - constructor(public key: string | null, public type: number | null) {} + constructor(public key: string | null, public type: number | null) { } } export class ApplyFromParent { - constructor(public typeName: string) {} + constructor(public typeName: string) { } } const InjectionSource = { @@ -114,7 +113,7 @@ export class InheritDef { export type TypegraphBuilder = (g: TypegraphBuilderArgs) => void; export class RawAuth { - constructor(readonly jsonStr: string) {} + constructor(readonly jsonStr: string) { } } export interface TypegraphOutput { @@ -220,9 +219,7 @@ export async function typegraph( ...InjectionSource, }; - log.debug("builder"); builder(g); - log.debug("builder: ok"); const ret = { serialize(config: ArtifactResolutionConfig) { diff --git a/typegraph/python/typegraph/graph/tg_artifact_upload.py b/typegraph/python/typegraph/graph/tg_artifact_upload.py index a324ad9291..880aa34af1 100644 --- a/typegraph/python/typegraph/graph/tg_artifact_upload.py +++ b/typegraph/python/typegraph/graph/tg_artifact_upload.py @@ -6,12 +6,13 @@ import sys from dataclasses import dataclass from typing import Any, Dict, List, Optional, Union -from urllib import request +from urllib import request, parse as Url from urllib.error import HTTPError from typegraph.gen.exports.core import Artifact from typegraph.gen.types import Err, Ok, Result from typegraph.graph.shared_types import BasicAuth +from typegraph import log @dataclass @@ -40,6 +41,7 @@ def __init__( headers: Dict[str, str], tg_path: str, ) -> None: + self.base_url = base_url self.artifacts = artifacts self.tg_name = tg_name sep = "/" if not base_url.endswith("/") else "" @@ -64,10 +66,10 @@ def __fetch_upload_urls( try: response = request.urlopen(req) except HTTPError as e: - raise Exception(f"Failed requesting artifact upload URLs: {e}") + raise Exception(f"failed to get upload URLs: {e}") if response.status != 200: - raise Exception(f"Failed requesting artifact upload URLs: {response}") + raise Exception(f"failed to get upload URLs: {response}") response = handle_response(response.read().decode()) return response @@ -83,7 +85,7 @@ def __upload( upload_headers["Authorization"] = self.auth.as_header_value() if url is None: - # print(f"Skipping upload for artifact: {meta.relativePath}", file=sys.stderr) + log.info("skipping artifact upload:", meta.relativePath) return Ok(None) if self.tg_path is None: @@ -94,8 +96,18 @@ def __upload( with open(path, "rb") as 
file: content = file.read() + # TODO temporary + parsed_upload_url = Url.urlparse(url) + parsed_url = Url.urlparse(self.base_url) + parsed_url = parsed_url._replace( + path=parsed_upload_url.path, query=parsed_upload_url.query + ) + + rebased_url = Url.urlunparse(parsed_url) + + log.info("uploading artifact", meta.relativePath, rebased_url) upload_req = request.Request( - url=url, + url=rebased_url, method="POST", data=content, headers=upload_headers, @@ -103,15 +115,13 @@ def __upload( try: response = request.urlopen(upload_req) except HTTPError as e: + log.debug(e) errmsg = json.load(e.fp).get("error", None) - - print(f"Failed to upload artifact {path}: {e}", file=sys.stderr) - print(f" - {errmsg}", file=sys.stderr) - print(f" - url={url}", file=sys.stderr) - raise e + raise Exception(errmsg) if response.status != 201: - raise Exception(f"Failed to upload artifact {path} {response.status}") + raise Exception(f"failed to upload artifact {path} {response.status}") + # TODO why?? return handle_response(response.read().decode()) def get_metas(self, artifacts: List[Artifact]) -> List[UploadArtifactMeta]: @@ -150,6 +160,7 @@ def upload_artifacts( artifact_metas = self.get_metas(self.artifacts) upload_urls = self.__fetch_upload_urls(artifact_metas) + log.debug("upload urls", upload_urls) results = [] for i in range(len(artifact_metas)): diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index 364b70a041..24391e57ab 100644 --- a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -3,6 +3,7 @@ import json import os +import traceback from dataclasses import dataclass from enum import Enum from typing import Dict, Union, Any @@ -16,10 +17,11 @@ from typegraph.graph.shared_types import BasicAuth, TypegraphOutput from typegraph.graph.tg_deploy import TypegraphDeployParams, tg_deploy from typegraph.utils import freeze_tg_output +from typegraph import log -PORT = "META_CLI_SERVER_PORT" # meta-cli instance that executes the current file +PORT = "MCLI_SERVER_PORT" # meta-cli instance that executes the current file SELF_PATH = ( - "META_CLI_TG_PATH" # path to the current file to uniquely identify the run results + "MCLI_TG_PATH" # path to the current file to uniquely identify the run results ) @@ -85,9 +87,10 @@ def serialize(self, config: CLIConfigRequest): ) # prefix from cli overrides the current value res = self.typegraph.serialize(artifact_cfg) except Exception as err: - return self.relay_error_to_cli( - Command.SERIALIZE, code="serialization_err", msg=str(err), value={} - ) + log.debug(traceback.format_exc()) + log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + return + return self.relay_data_to_cli(Command.SERIALIZE, data=json.loads(res.tgJson)) def deploy(self, config: CLIConfigRequest): @@ -112,9 +115,10 @@ def deploy(self, config: CLIConfigRequest): try: frozen_serialized = frozen_out.serialize(artifacts_config) except Exception as err: - return self.relay_error_to_cli( - Command.DEPLOY, code="serialization_err", msg=str(err), value={} - ) + log.debug(traceback.format_exc()) + log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + return + if artifacts_config.codegen: self.relay_data_to_cli( initiator=Command.CODEGEN, data=json.loads(frozen_serialized.tgJson) @@ -123,11 +127,12 @@ def deploy(self, config: CLIConfigRequest): try: ret = tg_deploy(frozen_out, params) except Exception as err: - return self.relay_error_to_cli( - Command.DEPLOY, code="deploy_err", 
msg=str(err), value={} - ) + log.debug(traceback.format_exc()) + log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + return - return self.relay_data_to_cli(Command.DEPLOY, data=ret.typegate) + log.debug("deploy result", {"deployResult": ret.typegate}) + log.success({"typegraph": self.typegraph.name}) def request_command(self) -> CLIServerResponse: config = self.request_config() diff --git a/typegraph/python/typegraph/log.py b/typegraph/python/typegraph/log.py new file mode 100644 index 0000000000..49b63dce78 --- /dev/null +++ b/typegraph/python/typegraph/log.py @@ -0,0 +1,30 @@ +from typing import Any +import json + + +def __format(*largs: Any): + return " ".join(map(str, largs)) + + +def debug(*largs: Any): + print("debug:", __format(*largs)) + + +def info(*largs: Any): + print("info:", __format(*largs)) + + +def warn(*largs: Any): + print("warning:", __format(*largs)) + + +def error(*largs: Any): + print("error:", __format(*largs)) + + +def failure(data: Any): + print("failure:", json.dumps(data)) + + +def success(data: Any): + print("success:", json.dumps(data)) From a8c2b36338c463eeca85e00f8319cdc76b66d428 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Fri, 31 May 2024 18:27:53 +0300 Subject: [PATCH 08/35] unpack migrations --- .../db/20240531152240_generated/migration.sql | 34 +++ .../roadmap-policies/db/migration_lock.toml | 3 + meta-cli/src/cli/deploy.rs | 12 +- meta-cli/src/config.rs | 11 +- meta-cli/src/deploy/actors/task.rs | 61 +++-- meta-cli/src/deploy/actors/task/action.rs | 255 +----------------- meta-cli/src/deploy/actors/task/deploy.rs | 179 ++++++++++++ meta-cli/src/deploy/actors/task/serialize.rs | 125 +++++++++ meta-cli/src/deploy/actors/task_manager.rs | 13 +- meta-cli/src/deploy/actors/watcher.rs | 21 +- meta-cli/src/deploy/push/pusher.rs | 2 +- typegraph/node/sdk/src/tg_deploy.ts | 11 +- typegraph/node/sdk/src/tg_manage.ts | 17 +- 13 files changed, 434 insertions(+), 310 deletions(-) create mode 100644 examples/migrations/roadmap-policies/db/20240531152240_generated/migration.sql create mode 100644 examples/migrations/roadmap-policies/db/migration_lock.toml create mode 100644 meta-cli/src/deploy/actors/task/deploy.rs create mode 100644 meta-cli/src/deploy/actors/task/serialize.rs diff --git a/examples/migrations/roadmap-policies/db/20240531152240_generated/migration.sql b/examples/migrations/roadmap-policies/db/20240531152240_generated/migration.sql new file mode 100644 index 0000000000..cad0ad9dc9 --- /dev/null +++ b/examples/migrations/roadmap-policies/db/20240531152240_generated/migration.sql @@ -0,0 +1,34 @@ +-- CreateTable +CREATE TABLE "bucket" ( + "id" SERIAL NOT NULL, + "name" TEXT NOT NULL, + + CONSTRAINT "bucket_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "idea" ( + "id" UUID NOT NULL, + "name" TEXT NOT NULL, + "authorEmail" TEXT NOT NULL, + "bucketId" INTEGER NOT NULL, + + CONSTRAINT "idea_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "vote" ( + "id" UUID NOT NULL, + "authorEmail" TEXT NOT NULL, + "importance" TEXT, + "desc" TEXT, + "ideaId" UUID NOT NULL, + + CONSTRAINT "vote_pkey" PRIMARY KEY ("id") +); + +-- AddForeignKey +ALTER TABLE "idea" ADD CONSTRAINT "idea_bucketId_fkey" FOREIGN KEY ("bucketId") REFERENCES "bucket"("id") ON DELETE RESTRICT ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "vote" ADD CONSTRAINT "vote_ideaId_fkey" FOREIGN KEY ("ideaId") REFERENCES "idea"("id") ON DELETE RESTRICT ON UPDATE CASCADE; diff --git a/examples/migrations/roadmap-policies/db/migration_lock.toml 
b/examples/migrations/roadmap-policies/db/migration_lock.toml new file mode 100644 index 0000000000..fbffa92c2b --- /dev/null +++ b/examples/migrations/roadmap-policies/db/migration_lock.toml @@ -0,0 +1,3 @@ +# Please do not edit this file manually +# It should be added in your version-control system (i.e. Git) +provider = "postgresql" \ No newline at end of file diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 943b838373..f853813e4d 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -1,29 +1,23 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use self::actors::task::action::DeployAction; -use self::actors::task::action::DeployActionGenerator; +use self::actors::task::deploy::{DeployAction, DeployActionGenerator}; use self::actors::task::TaskConfig; use self::actors::task_manager::{self, StopReason, TaskReason}; use super::{Action, ConfigArgs, NodeArgs}; use crate::com::store::{Command, Endpoint, MigrationAction, ServerStore}; use crate::config::Config; use crate::deploy::actors; -use crate::deploy::actors::console::{Console, ConsoleActor}; +use crate::deploy::actors::console::ConsoleActor; use crate::deploy::actors::discovery::DiscoveryActor; -use crate::deploy::actors::loader::{self, LoaderEvent, ReloadModule, ReloadReason, StopBehavior}; -use crate::deploy::actors::task::action::TaskAction; use crate::deploy::actors::task_manager::TaskManager; use crate::deploy::actors::watcher::{self, WatcherActor}; -use crate::deploy::push::pusher::PushResult; use crate::interlude::*; use crate::secrets::{RawSecrets, Secrets}; use actix_web::dev::ServerHandle; use clap::Parser; use futures::channel::oneshot; -use normpath::PathExt; use owo_colors::OwoColorize; -use tokio::sync::mpsc; #[derive(Parser, Debug)] pub struct DeploySubcommand { @@ -253,6 +247,7 @@ mod default_mode { let action_generator = DeployActionGenerator::new(task_config); let task_manager = TaskManager::new( + deploy.config.clone(), action_generator, deploy.max_parallel_loads.unwrap_or_else(num_cpus::get), report_tx, @@ -436,6 +431,7 @@ mod watch_mode { let (report_tx, report_rx) = oneshot::channel(); let task_manager = TaskManager::new( + deploy.config.clone(), action_generator.clone(), deploy.max_parallel_loads.unwrap_or_else(num_cpus::get), report_tx, diff --git a/meta-cli/src/config.rs b/meta-cli/src/config.rs index 040922e565..5aec1c1f31 100644 --- a/meta-cli/src/config.rs +++ b/meta-cli/src/config.rs @@ -269,13 +269,10 @@ impl Config { } /// canonical path to the migration given the typegraph path - pub fn prisma_migration_dir_abs(&self, tg_path: &Path, tg_name: &str) -> Result { - if tg_path.is_dir() { - bail!("Given typegraph path {} is not a file", tg_path.display()); - } - let mut base = tg_path.to_path_buf().clone(); - base.pop(); // remove file - Ok(base.join(self.prisma_migrations_dir_rel(tg_name))) + pub fn prisma_migration_dir_abs(&self, tg_name: &str) -> PathBuf { + let mut path = self.base_dir.clone(); + path.push(self.prisma_migrations_dir_rel(tg_name)); + path } } diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 6ad5b39c13..f0493cd43c 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -17,12 +17,16 @@ pub mod action; mod command; +pub mod deploy; +pub mod serialize; -use self::action::{ActionResult, TaskAction}; +use self::action::{ActionFinalizeContext, ActionResult, TaskAction}; use super::console::{Console, 
ConsoleActor}; use super::task_manager::{self, TaskManager}; -use crate::{com::server::get_instance_port, interlude::*}; -use actix::prelude::*; +use crate::com::server::get_instance_port; +use crate::config::Config; +use crate::interlude::*; +use color_eyre::owo_colors::OwoColorize; use common::typegraph::Typegraph; use process_wrap::tokio::TokioChildWrapper; use std::time::Duration; @@ -96,6 +100,7 @@ pub enum TaskFinishStatus { } pub struct TaskActor { + config: Arc, action: A, process: Option>, task_manager: Addr>, @@ -108,8 +113,14 @@ impl TaskActor where A: TaskAction, { - pub fn new(action: A, task_manager: Addr>, console: Addr) -> Self { + pub fn new( + config: Arc, + action: A, + task_manager: Addr>, + console: Addr, + ) -> Self { Self { + config, process: None, task_manager, console, @@ -233,7 +244,9 @@ impl Handler for TaskActor { let fut = async move { let reader = BufReader::new(stdout).lines(); - if let Err(e) = Self::loop_output_lines(reader, addr.clone(), console.clone()).await { + if let Err(e) = + Self::loop_output_lines(reader, addr.clone(), console.clone(), path.clone()).await + { console.error(format!( "failed to read process output on {:?}: {e:#}", path @@ -310,29 +323,34 @@ impl TaskActor { mut reader: Lines>, addr: Addr>, console: Addr, + path: Arc, ) -> tokio::io::Result<()> { let mut latest_level = OutputLevel::Info; + + let scope = format!("[{path}]", path = path.display()); + let scope = scope.yellow(); + while let Some(line) = reader.next_line().await? { if let Some(debug) = line.strip_prefix("debug: ") { - console.debug(debug.to_string()); + console.debug(format!("{scope} {debug}")); latest_level = OutputLevel::Debug; continue; } if let Some(info) = line.strip_prefix("info: ") { - console.info(info.to_string()); + console.info(format!("{scope} {info}")); latest_level = OutputLevel::Info; continue; } if let Some(warn) = line.strip_prefix("warning: ") { - console.warning(warn.to_string()); + console.warning(format!("{scope} {warn}")); latest_level = OutputLevel::Warning; continue; } if let Some(error) = line.strip_prefix("error: ") { - console.error(error.to_string()); + console.error(format!("{scope} {error}")); latest_level = OutputLevel::Error; continue; } @@ -351,16 +369,16 @@ impl TaskActor { match latest_level { OutputLevel::Debug => { - console.debug(format!("> {}", line)); + console.debug(format!("{scope}>{line}")); } OutputLevel::Info => { - console.info(format!("> {}", line)); + console.info(format!("{scope}>{line}")); } OutputLevel::Warning => { - console.warning(format!("> {}", line)); + console.warning(format!("{scope}>{line}")); } OutputLevel::Error => { - console.error(format!("> {}", line)); + console.error(format!("{scope}>{line}")); } } } @@ -372,14 +390,15 @@ impl Handler> for TaskActor { type Result = (); fn handle(&mut self, message: CollectOutput, ctx: &mut Context) -> Self::Result { - match &message.0 { - Ok(data) => { - self.console.info(self.action.get_success_message(&data)); - } - Err(data) => { - self.console.error(self.action.get_failure_message(&data)); - } - } + self.action.finalize( + &message.0, + ActionFinalizeContext { + config: self.config.clone(), + task_manager: self.task_manager.clone(), + task: ctx.address(), + console: self.console.clone(), + }, + ); self.collected_output.push(message.0); } } diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index 1fa486e23d..5703b0da6f 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ 
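The output-line handling above is the consuming half of a simple line protocol between meta-cli and the SDK child process: the SDK prints one message per line with a `debug:`, `info:`, `warning:` or `error:` prefix, the CLI strips the prefix and re-emits the message on its own console under a `[path]` scope, and unprefixed lines inherit the last seen level. The Python `log.py` introduced in the previous patch is one producer; the Node SDK's `log.ts` is not shown here, so the following is only a sketch of what such a producer could look like, mirroring the Python helper.

// Sketch of an SDK-side logger speaking the prefix protocol parsed by TaskActor.
// Assumed to mirror typegraph/python/typegraph/log.py; not the actual log.ts source.
function fmt(args: unknown[]): string {
  return args
    .map((arg) => (typeof arg === "string" ? arg : JSON.stringify(arg)))
    .join(" ");
}

export const log = {
  debug: (...args: unknown[]) => console.log("debug:", fmt(args)),
  info: (...args: unknown[]) => console.log("info:", fmt(args)),
  warn: (...args: unknown[]) => console.log("warning:", fmt(args)),
  error: (...args: unknown[]) => console.log("error:", fmt(args)),
  // failure/success carry a JSON payload that the CLI deserializes
  // (DeployError / DeploySuccess on the Rust side).
  failure: (data: unknown) => console.log("failure:", JSON.stringify(data)),
  success: (data: unknown) => console.log("success:", JSON.stringify(data)),
};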
-1,11 +1,10 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use super::{command::CommandBuilder, TaskConfig}; +use super::TaskActor; +use crate::deploy::actors::task_manager::TaskManager; use crate::interlude::*; -use common::typegraph::Typegraph; -use owo_colors::OwoColorize; -use serde::Deserialize; +use crate::{config::Config, deploy::actors::console::ConsoleActor}; use std::{path::Path, sync::Arc}; use tokio::{process::Command, sync::OwnedSemaphorePermit}; @@ -15,6 +14,13 @@ pub trait TaskActionGenerator: Clone { fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action; } +pub struct ActionFinalizeContext { + pub config: Arc, + pub task_manager: Addr>, + pub task: Addr>, + pub console: Addr, +} + pub trait OutputData: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send { fn get_typegraph_name(&self) -> String; } @@ -29,9 +35,9 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { fn get_path_owned(&self) -> Arc; fn get_start_message(&self) -> String; - fn get_success_message(&self, output: &Self::SuccessData) -> String; - fn get_failure_message(&self, output: &Self::FailureData) -> String; fn get_error_message(&self, err: &str) -> String; + + fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext); } pub type ActionResult = Result; @@ -42,240 +48,3 @@ pub fn get_typegraph_name(res: &ActionResult) -> String { Err(failure) => failure.get_typegraph_name(), } } - -pub use deploy::*; -pub use serialize::*; - -mod serialize { - use super::*; - - pub type SerializeAction = Arc; - - #[derive(Debug)] - pub struct SerializeActionInner { - path: Arc, - task_config: Arc, - #[allow(unused)] - permit: OwnedSemaphorePermit, - } - - #[derive(Clone)] - pub struct SerializeActionGenerator { - task_config: Arc, - } - - impl SerializeActionGenerator { - pub fn new(task_config: TaskConfig) -> Self { - Self { - task_config: Arc::new(task_config), - } - } - } - - impl TaskActionGenerator for SerializeActionGenerator { - type Action = SerializeAction; - - fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { - SerializeActionInner { - path, - task_config: self.task_config.clone(), - permit, - } - .into() - } - } - - #[derive(Deserialize, Debug)] - pub struct SerializeError { - typegraph: String, - error: String, - } - - impl OutputData for Typegraph { - fn get_typegraph_name(&self) -> String { - self.name().unwrap() - } - } - - impl OutputData for SerializeError { - fn get_typegraph_name(&self) -> String { - self.typegraph.clone() - } - } - - impl TaskAction for SerializeAction { - type SuccessData = Typegraph; - type FailureData = SerializeError; - type Generator = SerializeActionGenerator; - - async fn get_command(&self) -> Result { - CommandBuilder { - path: self.task_config.base_dir.to_path_buf().join(&self.path), - task_config: self.task_config.clone(), - action_env: "serialize", - } - .build() - .await - } - - fn get_path(&self) -> &Path { - return &self.path; - } - - fn get_path_owned(&self) -> Arc { - return self.path.clone(); - } - - fn get_start_message(&self) -> String { - format!("starting serialization process for {:?}", self.path) - } - - fn get_success_message(&self, output: &Self::SuccessData) -> String { - format!( - "{icon} successfully serialized typegraph {name} from {path:?}", - icon = "✓".green(), - name = output.get_typegraph_name().cyan(), - path = self.path, - ) - } - - fn get_failure_message(&self, output: &Self::FailureData) -> String { 
- format!( - "{icon} failed to serialize typegraph {name} from {path:?}: {err}", - icon = "✗".red(), - name = output.get_typegraph_name().cyan(), - path = self.path, - err = output.error, - ) - } - - fn get_error_message(&self, err: &str) -> String { - format!( - "{icon} failed to serialize typegraph(s) from {path:?}: {err}", - icon = "✗".red(), - path = self.path, - err = err, - ) - } - } -} - -mod deploy { - use super::*; - pub type DeployAction = Arc; - - #[derive(Debug)] - pub struct DeployActionInner { - path: Arc, - task_config: Arc, - #[allow(unused)] - permit: OwnedSemaphorePermit, - } - - #[derive(Clone)] - pub struct DeployActionGenerator { - task_config: Arc, - } - - impl DeployActionGenerator { - pub fn new(task_config: TaskConfig) -> Self { - Self { - task_config: Arc::new(task_config), - } - } - } - - impl TaskActionGenerator for DeployActionGenerator { - type Action = DeployAction; - - fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { - DeployActionInner { - path, - task_config: self.task_config.clone(), - permit, - } - .into() - } - } - - #[derive(Deserialize, Debug)] - pub struct DeploySuccess { - typegraph: String, - } - - #[derive(Deserialize, Debug)] - pub struct DeployError { - typegraph: String, - error: String, - #[serde(default)] - follow_up: Option, // todo migration - } - - impl OutputData for DeploySuccess { - fn get_typegraph_name(&self) -> String { - self.typegraph.clone() - } - } - - impl OutputData for DeployError { - fn get_typegraph_name(&self) -> String { - self.typegraph.clone() - } - } - - impl TaskAction for DeployAction { - type SuccessData = DeploySuccess; - type FailureData = DeployError; - type Generator = DeployActionGenerator; - - async fn get_command(&self) -> Result { - CommandBuilder { - path: self.task_config.base_dir.to_path_buf().join(&self.path), - task_config: self.task_config.clone(), - action_env: "deploy", - } - .build() - .await - } - - fn get_path(&self) -> &Path { - return &self.path; - } - - fn get_path_owned(&self) -> Arc { - return self.path.clone(); - } - - fn get_start_message(&self) -> String { - format!("starting deployment process for {:?}", self.path) - } - - fn get_success_message(&self, output: &Self::SuccessData) -> String { - format!( - "{icon} successfully deployed typegraph {name} from {path:?}", - icon = "✓".green(), - name = output.get_typegraph_name().cyan(), - path = self.path, - ) - } - - fn get_failure_message(&self, output: &Self::FailureData) -> String { - format!( - "{icon} failed to deploy typegraph {name} from {path:?}: {err}", - icon = "✗".red(), - name = output.get_typegraph_name().cyan(), - path = self.path, - err = output.error, - ) - } - - fn get_error_message(&self, err: &str) -> String { - format!( - "{icon} failed to deploy typegraph(s) from {path:?}: {err}", - icon = "✗".red(), - path = self.path, - err = err, - ) - } - } -} diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs new file mode 100644 index 0000000000..75ab2a879b --- /dev/null +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -0,0 +1,179 @@ +use super::action::{ + ActionFinalizeContext, ActionResult, OutputData, TaskAction, TaskActionGenerator, +}; +use super::command::CommandBuilder; +use super::TaskConfig; +use crate::deploy::actors::console::Console; +use crate::deploy::push::pusher::{MessageEntry, Migrations}; +use crate::interlude::*; +use color_eyre::owo_colors::OwoColorize; +use serde::Deserialize; +use std::{path::Path, sync::Arc}; +use 
tokio::{process::Command, sync::OwnedSemaphorePermit}; + +pub type DeployAction = Arc; + +#[derive(Debug)] +pub struct DeployActionInner { + path: Arc, + task_config: Arc, + #[allow(unused)] + permit: OwnedSemaphorePermit, +} + +#[derive(Clone)] +pub struct DeployActionGenerator { + task_config: Arc, +} + +impl DeployActionGenerator { + pub fn new(task_config: TaskConfig) -> Self { + Self { + task_config: Arc::new(task_config), + } + } +} + +impl TaskActionGenerator for DeployActionGenerator { + type Action = DeployAction; + + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + DeployActionInner { + path, + task_config: self.task_config.clone(), + permit, + } + .into() + } +} + +#[derive(Deserialize, Debug)] +pub struct DeploySuccess { + pub typegraph: String, + pub messages: Vec, + pub migrations: Vec, + pub failure: Option, +} + +#[derive(Deserialize, Debug)] +pub struct DeployError { + typegraph: String, + error: String, + #[serde(default)] + follow_up: Option, // todo migration +} + +impl OutputData for DeploySuccess { + fn get_typegraph_name(&self) -> String { + self.typegraph.clone() + } +} + +impl OutputData for DeployError { + fn get_typegraph_name(&self) -> String { + self.typegraph.clone() + } +} + +impl TaskAction for DeployAction { + type SuccessData = DeploySuccess; + type FailureData = DeployError; + type Generator = DeployActionGenerator; + + async fn get_command(&self) -> Result { + CommandBuilder { + path: self.task_config.base_dir.to_path_buf().join(&self.path), + task_config: self.task_config.clone(), + action_env: "deploy", + } + .build() + .await + } + + fn get_path(&self) -> &Path { + return &self.path; + } + + fn get_path_owned(&self) -> Arc { + return self.path.clone(); + } + + fn get_start_message(&self) -> String { + format!("starting deployment process for {:?}", self.path) + } + + fn get_error_message(&self, err: &str) -> String { + format!( + "{icon} failed to deploy typegraph(s) from {path:?}: {err}", + icon = "✗".red(), + path = self.path, + err = err, + ) + } + + fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext) { + match res { + Ok(data) => { + let scope = format!("({path})", path = self.path.display()); + let scope = scope.yellow(); + + for message in &data.messages { + match message { + MessageEntry::Info(info) => ctx.console.info(format!("{scope} {info}")), + MessageEntry::Warning(warning) => { + ctx.console.warning(format!("{scope} {warning}")) + } + MessageEntry::Error(error) => ctx.console.error(format!("{scope} {error}")), + } + } + + let tg_name = data.get_typegraph_name(); + + let migdir = ctx.config.prisma_migration_dir_abs(&data.typegraph); + for migrations in data.migrations.iter() { + let dest = migdir.join(&migrations.runtime); + if let Err(err) = + common::archive::unpack(&dest, Some(migrations.migrations.clone())) + { + ctx.console.error(format!( + "error while unpacking migrations into {:?}", + migdir + )); + ctx.console.error(format!("{err:?}")); + } else { + ctx.console.info(format!( + "{scope} unpacked migrations for {}/{} at {}", + tg_name.cyan(), + migrations.runtime, + dest.display().bold() + )); + } + } + + match data.failure { + Some(_) => { + todo!(); + } + None => { + ctx.console.info(format!( + "{icon} successfully deployed typegraph {name} from {path}", + icon = "✓".green(), + name = tg_name.cyan(), + path = self.path.display().yellow(), + )); + } + } + } + + Err(data) => { + ctx.console.error(format!( + "{icon} failed to deploy typegraph {name} from {path:?}: {err}", + icon = 
"✗".red(), + name = data.get_typegraph_name().cyan(), + path = self.path, + err = data.error, + )); + } + } + } +} diff --git a/meta-cli/src/deploy/actors/task/serialize.rs b/meta-cli/src/deploy/actors/task/serialize.rs new file mode 100644 index 0000000000..7cb93b2727 --- /dev/null +++ b/meta-cli/src/deploy/actors/task/serialize.rs @@ -0,0 +1,125 @@ +use super::action::{ + ActionFinalizeContext, ActionResult, OutputData, TaskAction, TaskActionGenerator, +}; +use super::command::CommandBuilder; +use super::TaskConfig; +use crate::deploy::actors::console::Console; +use crate::interlude::*; +use color_eyre::owo_colors::OwoColorize; +use common::typegraph::Typegraph; +use serde::Deserialize; +use std::{path::Path, sync::Arc}; +use tokio::{process::Command, sync::OwnedSemaphorePermit}; + +pub type SerializeAction = Arc; + +#[derive(Debug)] +pub struct SerializeActionInner { + path: Arc, + task_config: Arc, + #[allow(unused)] + permit: OwnedSemaphorePermit, +} + +#[derive(Clone)] +pub struct SerializeActionGenerator { + task_config: Arc, +} + +impl SerializeActionGenerator { + pub fn new(task_config: TaskConfig) -> Self { + Self { + task_config: Arc::new(task_config), + } + } +} + +impl TaskActionGenerator for SerializeActionGenerator { + type Action = SerializeAction; + + fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + SerializeActionInner { + path, + task_config: self.task_config.clone(), + permit, + } + .into() + } +} + +#[derive(Deserialize, Debug)] +pub struct SerializeError { + typegraph: String, + error: String, +} + +impl OutputData for Typegraph { + fn get_typegraph_name(&self) -> String { + self.name().unwrap() + } +} + +impl OutputData for SerializeError { + fn get_typegraph_name(&self) -> String { + self.typegraph.clone() + } +} + +impl TaskAction for SerializeAction { + type SuccessData = Typegraph; + type FailureData = SerializeError; + type Generator = SerializeActionGenerator; + + async fn get_command(&self) -> Result { + CommandBuilder { + path: self.task_config.base_dir.to_path_buf().join(&self.path), + task_config: self.task_config.clone(), + action_env: "serialize", + } + .build() + .await + } + + fn get_path(&self) -> &Path { + return &self.path; + } + + fn get_path_owned(&self) -> Arc { + return self.path.clone(); + } + + fn get_start_message(&self) -> String { + format!("starting serialization process for {:?}", self.path) + } + + fn get_error_message(&self, err: &str) -> String { + format!( + "{icon} failed to serialize typegraph(s) from {path:?}: {err}", + icon = "✗".red(), + path = self.path, + err = err, + ) + } + + fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext) { + match res { + Ok(data) => { + ctx.console.info(format!( + "{icon} successfully serialized typegraph {name} from {path:?}", + icon = "✓".green(), + name = data.get_typegraph_name().cyan(), + path = self.path, + )); + } + Err(output) => { + ctx.console.error(format!( + "{icon} failed to serialize typegraph {name} from {path:?}: {err}", + icon = "✗".red(), + name = output.get_typegraph_name().cyan(), + path = self.path, + err = output.error, + )); + } + } + } +} diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 5ce0676fc4..4b626d408f 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -7,7 +7,7 @@ use futures::channel::oneshot; use indexmap::IndexMap; use tokio::sync::{OwnedSemaphorePermit, Semaphore}; -use crate::interlude::*; +use 
crate::{config::Config, interlude::*}; use super::{ console::{Console, ConsoleActor}, @@ -88,6 +88,7 @@ pub enum StopReason { } pub struct TaskManager { + config: Arc, action_generator: A::Generator, active_tasks: HashMap, Addr>>, pending_tasks: HashSet>, @@ -100,12 +101,14 @@ pub struct TaskManager { impl TaskManager { pub fn new( + config: Arc, action_generator: A::Generator, max_parallel_tasks: usize, report_tx: oneshot::Sender>, console: Addr, ) -> Self { Self { + config, action_generator, active_tasks: Default::default(), pending_tasks: Default::default(), @@ -222,7 +225,13 @@ impl Handler for TaskManager { .action_generator .generate(message.path.clone(), message.permit); let path = action.get_path_owned(); - let task_addr = TaskActor::new(action, ctx.address(), self.console.clone()).start(); + let task_addr = TaskActor::new( + self.config.clone(), + action, + ctx.address(), + self.console.clone(), + ) + .start(); self.pending_tasks.remove(&path); self.active_tasks.insert(path.clone(), task_addr); } diff --git a/meta-cli/src/deploy/actors/watcher.rs b/meta-cli/src/deploy/actors/watcher.rs index d79516c4da..ed1db67b07 100644 --- a/meta-cli/src/deploy/actors/watcher.rs +++ b/meta-cli/src/deploy/actors/watcher.rs @@ -1,9 +1,15 @@ -use crate::deploy::push::pusher::RetryManager; // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; -use actix::prelude::*; +use super::console::Console; +use super::task::deploy::DeployAction; +use super::task_manager::{self, TaskManager, TaskReason}; +use crate::config::Config; +use crate::deploy::actors::console::ConsoleActor; +use crate::deploy::push::pusher::RetryManager; +use crate::interlude::*; +use crate::typegraph::dependency_graph::DependencyGraph; +use crate::typegraph::loader::discovery::FileFilter; use common::typegraph::Typegraph; use grep::searcher::{BinaryDetection, SearcherBuilder}; use notify_debouncer_mini::notify::{RecommendedWatcher, RecursiveMode}; @@ -11,15 +17,6 @@ use notify_debouncer_mini::{new_debouncer, notify, DebounceEventResult, Debounce use pathdiff::diff_paths; use std::path::{Path, PathBuf}; use std::{sync::Arc, time::Duration}; -use tokio::sync::mpsc; - -use super::console::Console; -use super::task::action::DeployAction; -use super::task_manager::{self, TaskManager, TaskReason}; -use crate::config::Config; -use crate::deploy::actors::console::ConsoleActor; -use crate::typegraph::dependency_graph::DependencyGraph; -use crate::typegraph::loader::discovery::FileFilter; pub mod message { use super::*; diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index eb8850a5b2..1ec4e1e9d9 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -151,7 +151,7 @@ impl PushResult { // tg workdir + prisma_migration_rel let migdir = ServerStore::get_config() .unwrap() - .prisma_migration_dir_abs(&self.sdk_response.typegraph_path, &self.original_name)?; + .prisma_migration_dir_abs(&self.original_name); for migrations in self.migrations.iter() { let dest = migdir.join(&migrations.runtime); diff --git a/typegraph/node/sdk/src/tg_deploy.ts b/typegraph/node/sdk/src/tg_deploy.ts index 9b6534e4a3..5bd59445fd 100644 --- a/typegraph/node/sdk/src/tg_deploy.ts +++ b/typegraph/node/sdk/src/tg_deploy.ts @@ -30,7 +30,7 @@ export interface TypegraphRemoveParams { export interface DeployResult { serialized: string; - typegate: Record | string; + response: Record; } export interface RemoveResult { @@ 
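The new `DeploySuccess`/`DeployError` types above, together with `MessageEntry` and `Migrations` reused from `pusher.rs`, define what the CLI expects to find in the SDK's `success:`/`failure:` payload: typegate messages to relay to the console and packed prisma migrations to unpack under the configured migration directory. In TypeScript terms the success payload looks roughly like the sketch below; the field names come from the Rust structs, while the value encodings (for example the archive format carried in `migrations`) are assumptions defined outside this patch.

// Approximate shape of the JSON printed after "success:" by the SDK on deploy.
// Field names from deploy.rs above; value types are assumptions, not taken from this patch.
interface MigrationsEntry {
  runtime: string; // prisma runtime name, used as the destination subdirectory
  migrations: string; // packed migration files, unpacked with common::archive::unpack
}

interface DeploySuccessPayload {
  typegraph: string;
  messages: unknown[]; // MessageEntry values (info/warning/error) relayed to the console
  migrations: MigrationsEntry[];
  failure?: unknown; // set when the typegate reported a push failure (still a todo!() above)
}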
-82,9 +82,16 @@ export async function tgDeploy( }), }, `tgDeploy failed to deploy typegraph ${typegraph.name}`); + if (response.errors) { + for (const err of response.errors) { + console.error(err.message); + } + throw new Error(`failed to deploy typegraph ${typegraph.name}`); + } + return { serialized: tgJson, - typegate: response, + response: response.data.addTypegraph, }; } diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index 1fcc4ea506..f515e80cab 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -182,34 +182,23 @@ export class Manager { ); } - let deployRes: any; try { - const { typegate } = await tgDeploy(reusableTgOutput, { + const { response } = await tgDeploy(reusableTgOutput, { baseUrl: endpoint, artifactsConfig: config, secrets, auth: new BasicAuth(auth.username, auth.password), typegraphPath: this.#typegraphPath, }); - deployRes = typegate; + + log.success({ typegraph: this.#typegraph.name, ...response }); } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, error: err?.message ?? "failed to deploy typegraph", }); return; - // return await this.#relayErrorToCLI( - // "deploy", - // "deploy_err", - // err?.message ?? "error deploying typegraph to typegate", - // { - // err, - // ...(err.cause ? { cause: err.cause } : {}), - // }, - // ); } - log.debug("deploy result", { deployRes }); - log.success({ typegraph: this.#typegraph.name }); } async #relayResultToCLI(initiator: Command, data: T) { From 92179216fb75f54c5764097b873e56e11225ed05 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Wed, 5 Jun 2024 11:04:46 +0300 Subject: [PATCH 09/35] feat: task io protocol --- examples/metatype.yaml | 18 +- .../20230801223103_generated/migration.sql | 0 .../migrations/blog/blog/migration_lock.toml | 0 .../20230407170041_init/migration.sql | 0 .../database/migration_lock.toml | 0 .../20230407165539_init/migration.sql | 0 .../database/database/migration_lock.toml | 0 .../20230407170028_init/migration.sql | 0 .../fcm/database/migration_lock.toml | 0 .../20240604132537_generated}/migration.sql | 15 +- .../func-gql}/db/migration_lock.toml | 2 +- .../20230407170016_init/migration.sql | 0 .../20230529111022_generated/migration.sql | 0 .../graphql/database/migration_lock.toml | 0 .../demo/20230204231405_init/migration.sql | 0 .../demo/20230204231612_change/migration.sql | 0 .../20230801190752_generated/migration.sql | 0 .../homepage/demo/migration_lock.toml | 0 .../legacy/20230407165628_init/migration.sql | 0 .../legacy/migration_lock.toml | 0 .../legacy/20230415233128_init/migration.sql | 0 .../20230801221728_generated/migration.sql | 0 .../prisma-runtime/legacy/migration_lock.toml | 0 .../20240604130842_generated/migration.sql | 8 + .../database/migration_lock.toml | 3 + .../20231115102057_generated}/migration.sql | 0 .../roadmap-execute/db/migration_lock.toml | 0 .../20231115133338_generated}/migration.sql | 0 .../roadmap-func/db/migration_lock.toml | 0 .../20231115102130_generated}/migration.sql | 0 .../roadmap-policies/db/migration_lock.toml | 2 +- .../20231115102159_generated}/migration.sql | 0 .../roadmap-prisma}/db/migration_lock.toml | 0 .../20231115102319_generated}/migration.sql | 0 .../roadmap-reduce}/db/migration_lock.toml | 0 .../20231115110552_generated}/migration.sql | 0 .../roadmap-rest}/db/migration_lock.toml | 0 examples/typegraphs/basic.ts | 42 +- examples/typegraphs/http-runtime.py | 2 +- examples/typegraphs/http-runtime.ts | 50 ++ examples/typegraphs/jwt.ts | 42 +- 
examples/typegraphs/math.py | 2 +- examples/typegraphs/math.ts | 98 ++-- examples/typegraphs/oauth2.ts | 42 +- examples/typegraphs/quick-start-project.py | 2 +- examples/typegraphs/quick-start-project.ts | 72 +-- meta-cli/src/cli/deploy.rs | 61 ++- meta-cli/src/cli/serialize.rs | 77 +-- meta-cli/src/config.rs | 22 + meta-cli/src/deploy/actors/task.rs | 157 +++++- meta-cli/src/deploy/actors/task/action.rs | 15 +- meta-cli/src/deploy/actors/task/command.rs | 3 +- meta-cli/src/deploy/actors/task/deploy.rs | 175 +++++-- .../task/deploy/migration_resolution.rs | 2 + .../deploy/actors/task/deploy/migrations.rs | 478 ++++++++++++++++++ meta-cli/src/deploy/actors/task/serialize.rs | 41 +- meta-cli/src/deploy/actors/task_manager.rs | 8 +- .../src/deploy/push/migration_resolution.rs | 132 ----- meta-cli/src/deploy/push/pusher.rs | 407 ++++++++------- typegate/src/errors.ts | 15 +- typegate/src/services/responses.ts | 11 +- typegate/src/typegate/mod.ts | 66 +-- typegraph/core/src/global_store.rs | 5 +- typegraph/core/src/lib.rs | 10 +- typegraph/core/src/typegraph.rs | 18 +- typegraph/core/src/utils/postprocess/mod.rs | 35 +- .../core/src/utils/postprocess/prisma_rt.rs | 35 +- typegraph/core/wit/typegraph.wit | 36 +- typegraph/node/sdk/src/io.ts | 254 ++++++++++ typegraph/node/sdk/src/log.ts | 46 -- typegraph/node/sdk/src/metagen.ts | 48 +- typegraph/node/sdk/src/tg_artifact_upload.ts | 59 ++- typegraph/node/sdk/src/tg_deploy.ts | 92 ++-- typegraph/node/sdk/src/tg_manage.ts | 239 +++++---- typegraph/node/sdk/src/typegraph.ts | 72 +-- typegraph/node/sdk/src/utils/func_utils.ts | 22 +- .../typegate/authentication/index.mdx | 8 +- 77 files changed, 2057 insertions(+), 992 deletions(-) rename {website => examples}/migrations/blog/blog/20230801223103_generated/migration.sql (100%) rename {website => examples}/migrations/blog/blog/migration_lock.toml (100%) rename {website => examples}/migrations/business-logic/database/20230407170041_init/migration.sql (100%) rename {website => examples}/migrations/business-logic/database/migration_lock.toml (100%) rename {website => examples}/migrations/database/database/20230407165539_init/migration.sql (100%) rename {website => examples}/migrations/database/database/migration_lock.toml (100%) rename {website => examples}/migrations/fcm/database/20230407170028_init/migration.sql (100%) rename {website => examples}/migrations/fcm/database/migration_lock.toml (100%) rename {website/migrations/roadmap-rest/db/20231115110552_generated => examples/migrations/func-gql/db/20240604132537_generated}/migration.sql (58%) rename {website/migrations/roadmap-rest => examples/migrations/func-gql}/db/migration_lock.toml (81%) rename {website => examples}/migrations/graphql/database/20230407170016_init/migration.sql (100%) rename {website => examples}/migrations/graphql/database/20230529111022_generated/migration.sql (100%) rename {website => examples}/migrations/graphql/database/migration_lock.toml (100%) rename {website => examples}/migrations/homepage/demo/20230204231405_init/migration.sql (100%) rename {website => examples}/migrations/homepage/demo/20230204231612_change/migration.sql (100%) rename {website => examples}/migrations/homepage/demo/20230801190752_generated/migration.sql (100%) rename {website => examples}/migrations/homepage/demo/migration_lock.toml (100%) rename {website => examples}/migrations/orm-for-the-edge/legacy/20230407165628_init/migration.sql (100%) rename {website => examples}/migrations/orm-for-the-edge/legacy/migration_lock.toml (100%) rename {website => 
examples}/migrations/prisma-runtime/legacy/20230415233128_init/migration.sql (100%) rename {website => examples}/migrations/prisma-runtime/legacy/20230801221728_generated/migration.sql (100%) rename {website => examples}/migrations/prisma-runtime/legacy/migration_lock.toml (100%) create mode 100644 examples/migrations/quick-start-project/database/20240604130842_generated/migration.sql create mode 100644 examples/migrations/quick-start-project/database/migration_lock.toml rename examples/migrations/{roadmap-policies/db/20240531152240_generated => roadmap-execute/db/20231115102057_generated}/migration.sql (100%) rename {website => examples}/migrations/roadmap-execute/db/migration_lock.toml (100%) rename {website/migrations/roadmap-execute/db/20231115102057_generated => examples/migrations/roadmap-func/db/20231115133338_generated}/migration.sql (100%) rename {website => examples}/migrations/roadmap-func/db/migration_lock.toml (100%) rename {website/migrations/roadmap-func/db/20231115133338_generated => examples/migrations/roadmap-policies/db/20231115102130_generated}/migration.sql (100%) rename {website/migrations/roadmap-policies/db/20231115102130_generated => examples/migrations/roadmap-prisma/db/20231115102159_generated}/migration.sql (100%) rename {website/migrations/roadmap-policies => examples/migrations/roadmap-prisma}/db/migration_lock.toml (100%) rename {website/migrations/roadmap-prisma/db/20231115102159_generated => examples/migrations/roadmap-reduce/db/20231115102319_generated}/migration.sql (100%) rename {website/migrations/roadmap-prisma => examples/migrations/roadmap-reduce}/db/migration_lock.toml (100%) rename {website/migrations/roadmap-reduce/db/20231115102319_generated => examples/migrations/roadmap-rest/db/20231115110552_generated}/migration.sql (100%) rename {website/migrations/roadmap-reduce => examples/migrations/roadmap-rest}/db/migration_lock.toml (100%) create mode 100644 examples/typegraphs/http-runtime.ts create mode 100644 meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs create mode 100644 meta-cli/src/deploy/actors/task/deploy/migrations.rs create mode 100644 typegraph/node/sdk/src/io.ts delete mode 100644 typegraph/node/sdk/src/log.ts diff --git a/examples/metatype.yaml b/examples/metatype.yaml index 3a61cda757..bc049975cc 100644 --- a/examples/metatype.yaml +++ b/examples/metatype.yaml @@ -4,6 +4,8 @@ typegates: username: admin password: password secrets: + quick-start-project: + POSTGRES: postgresql://postgres:password@localhost:5432/db?schema=quick_start_project basic-authentication: BASIC_admin: "password" jwt-authentication: @@ -67,18 +69,20 @@ typegates: roadmap: BASIC_andim: hunter2 func-gql: - POSTGRES: "postgresql://postgres:password@localhost:5432/db?schema=func_ctx" + POSTGRES: "postgresql://postgres:password@localhost:5432/db?schema=func_gql" prd: url: https://demo.metatype.dev username: admin password: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/TG_ADMIN_PASSWORD secrets: + quick-start-project: + POSTGRES: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/TG_QUICK_START_PROJECT_POSTGRES basic-authentication: BASIC_admin: "password" jwt-authentication: CUSTOM_JWT: "wOoyc8ijEHP99oASvJ0IXmHDOMYQH6" - oauth2-authentication: + oauth2_authentication: GITHUB_CLIENT_ID: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/GITHUB_CLIENT_ID GITHUB_CLIENT_SECRET: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/GITHUB_CLIENT_SECRET authentication: @@ -112,10 +116,10 @@ typegates: GITHUB_CLIENT_ID: 
infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/GITHUB_CLIENT_ID GITHUB_CLIENT_SECRET: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/GITHUB_CLIENT_SECRET files-upload: - S3_HOST: http://localhost:9000 - S3_REGION: local - S3_ACCESS_KEY: minio - S3_SECRET_KEY: password + S3_HOST: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/S3_HOST + S3_REGION: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/S3_REGION + S3_ACCESS_KEY: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/S3_ACCESS_KEY + S3_SECRET_KEY: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/S3_SECRET_KEY roadmap-prisma: POSTGRES: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/TG_ROADMAP_PRISMA_POSTGRES BASIC_andim: hunter2 @@ -136,6 +140,8 @@ typegates: BASIC_andim: hunter2 roadmap: BASIC_andim: hunter2 + func-gql: + POSTGRES: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/TG_FUNC_GQL_POSTGRES typegraphs: materializers: diff --git a/website/migrations/blog/blog/20230801223103_generated/migration.sql b/examples/migrations/blog/blog/20230801223103_generated/migration.sql similarity index 100% rename from website/migrations/blog/blog/20230801223103_generated/migration.sql rename to examples/migrations/blog/blog/20230801223103_generated/migration.sql diff --git a/website/migrations/blog/blog/migration_lock.toml b/examples/migrations/blog/blog/migration_lock.toml similarity index 100% rename from website/migrations/blog/blog/migration_lock.toml rename to examples/migrations/blog/blog/migration_lock.toml diff --git a/website/migrations/business-logic/database/20230407170041_init/migration.sql b/examples/migrations/business-logic/database/20230407170041_init/migration.sql similarity index 100% rename from website/migrations/business-logic/database/20230407170041_init/migration.sql rename to examples/migrations/business-logic/database/20230407170041_init/migration.sql diff --git a/website/migrations/business-logic/database/migration_lock.toml b/examples/migrations/business-logic/database/migration_lock.toml similarity index 100% rename from website/migrations/business-logic/database/migration_lock.toml rename to examples/migrations/business-logic/database/migration_lock.toml diff --git a/website/migrations/database/database/20230407165539_init/migration.sql b/examples/migrations/database/database/20230407165539_init/migration.sql similarity index 100% rename from website/migrations/database/database/20230407165539_init/migration.sql rename to examples/migrations/database/database/20230407165539_init/migration.sql diff --git a/website/migrations/database/database/migration_lock.toml b/examples/migrations/database/database/migration_lock.toml similarity index 100% rename from website/migrations/database/database/migration_lock.toml rename to examples/migrations/database/database/migration_lock.toml diff --git a/website/migrations/fcm/database/20230407170028_init/migration.sql b/examples/migrations/fcm/database/20230407170028_init/migration.sql similarity index 100% rename from website/migrations/fcm/database/20230407170028_init/migration.sql rename to examples/migrations/fcm/database/20230407170028_init/migration.sql diff --git a/website/migrations/fcm/database/migration_lock.toml b/examples/migrations/fcm/database/migration_lock.toml similarity index 100% rename from website/migrations/fcm/database/migration_lock.toml rename to examples/migrations/fcm/database/migration_lock.toml diff --git 
a/website/migrations/roadmap-rest/db/20231115110552_generated/migration.sql b/examples/migrations/func-gql/db/20240604132537_generated/migration.sql similarity index 58% rename from website/migrations/roadmap-rest/db/20231115110552_generated/migration.sql rename to examples/migrations/func-gql/db/20240604132537_generated/migration.sql index cad0ad9dc9..c9dfe67ff4 100644 --- a/website/migrations/roadmap-rest/db/20231115110552_generated/migration.sql +++ b/examples/migrations/func-gql/db/20240604132537_generated/migration.sql @@ -1,17 +1,9 @@ --- CreateTable -CREATE TABLE "bucket" ( - "id" SERIAL NOT NULL, - "name" TEXT NOT NULL, - - CONSTRAINT "bucket_pkey" PRIMARY KEY ("id") -); - -- CreateTable CREATE TABLE "idea" ( "id" UUID NOT NULL, "name" TEXT NOT NULL, + "desc" TEXT, "authorEmail" TEXT NOT NULL, - "bucketId" INTEGER NOT NULL, CONSTRAINT "idea_pkey" PRIMARY KEY ("id") ); @@ -20,15 +12,10 @@ CREATE TABLE "idea" ( CREATE TABLE "vote" ( "id" UUID NOT NULL, "authorEmail" TEXT NOT NULL, - "importance" TEXT, - "desc" TEXT, "ideaId" UUID NOT NULL, CONSTRAINT "vote_pkey" PRIMARY KEY ("id") ); --- AddForeignKey -ALTER TABLE "idea" ADD CONSTRAINT "idea_bucketId_fkey" FOREIGN KEY ("bucketId") REFERENCES "bucket"("id") ON DELETE RESTRICT ON UPDATE CASCADE; - -- AddForeignKey ALTER TABLE "vote" ADD CONSTRAINT "vote_ideaId_fkey" FOREIGN KEY ("ideaId") REFERENCES "idea"("id") ON DELETE RESTRICT ON UPDATE CASCADE; diff --git a/website/migrations/roadmap-rest/db/migration_lock.toml b/examples/migrations/func-gql/db/migration_lock.toml similarity index 81% rename from website/migrations/roadmap-rest/db/migration_lock.toml rename to examples/migrations/func-gql/db/migration_lock.toml index 99e4f20090..fbffa92c2b 100644 --- a/website/migrations/roadmap-rest/db/migration_lock.toml +++ b/examples/migrations/func-gql/db/migration_lock.toml @@ -1,3 +1,3 @@ # Please do not edit this file manually # It should be added in your version-control system (i.e. 
Git) -provider = "postgresql" +provider = "postgresql" \ No newline at end of file diff --git a/website/migrations/graphql/database/20230407170016_init/migration.sql b/examples/migrations/graphql/database/20230407170016_init/migration.sql similarity index 100% rename from website/migrations/graphql/database/20230407170016_init/migration.sql rename to examples/migrations/graphql/database/20230407170016_init/migration.sql diff --git a/website/migrations/graphql/database/20230529111022_generated/migration.sql b/examples/migrations/graphql/database/20230529111022_generated/migration.sql similarity index 100% rename from website/migrations/graphql/database/20230529111022_generated/migration.sql rename to examples/migrations/graphql/database/20230529111022_generated/migration.sql diff --git a/website/migrations/graphql/database/migration_lock.toml b/examples/migrations/graphql/database/migration_lock.toml similarity index 100% rename from website/migrations/graphql/database/migration_lock.toml rename to examples/migrations/graphql/database/migration_lock.toml diff --git a/website/migrations/homepage/demo/20230204231405_init/migration.sql b/examples/migrations/homepage/demo/20230204231405_init/migration.sql similarity index 100% rename from website/migrations/homepage/demo/20230204231405_init/migration.sql rename to examples/migrations/homepage/demo/20230204231405_init/migration.sql diff --git a/website/migrations/homepage/demo/20230204231612_change/migration.sql b/examples/migrations/homepage/demo/20230204231612_change/migration.sql similarity index 100% rename from website/migrations/homepage/demo/20230204231612_change/migration.sql rename to examples/migrations/homepage/demo/20230204231612_change/migration.sql diff --git a/website/migrations/homepage/demo/20230801190752_generated/migration.sql b/examples/migrations/homepage/demo/20230801190752_generated/migration.sql similarity index 100% rename from website/migrations/homepage/demo/20230801190752_generated/migration.sql rename to examples/migrations/homepage/demo/20230801190752_generated/migration.sql diff --git a/website/migrations/homepage/demo/migration_lock.toml b/examples/migrations/homepage/demo/migration_lock.toml similarity index 100% rename from website/migrations/homepage/demo/migration_lock.toml rename to examples/migrations/homepage/demo/migration_lock.toml diff --git a/website/migrations/orm-for-the-edge/legacy/20230407165628_init/migration.sql b/examples/migrations/orm-for-the-edge/legacy/20230407165628_init/migration.sql similarity index 100% rename from website/migrations/orm-for-the-edge/legacy/20230407165628_init/migration.sql rename to examples/migrations/orm-for-the-edge/legacy/20230407165628_init/migration.sql diff --git a/website/migrations/orm-for-the-edge/legacy/migration_lock.toml b/examples/migrations/orm-for-the-edge/legacy/migration_lock.toml similarity index 100% rename from website/migrations/orm-for-the-edge/legacy/migration_lock.toml rename to examples/migrations/orm-for-the-edge/legacy/migration_lock.toml diff --git a/website/migrations/prisma-runtime/legacy/20230415233128_init/migration.sql b/examples/migrations/prisma-runtime/legacy/20230415233128_init/migration.sql similarity index 100% rename from website/migrations/prisma-runtime/legacy/20230415233128_init/migration.sql rename to examples/migrations/prisma-runtime/legacy/20230415233128_init/migration.sql diff --git a/website/migrations/prisma-runtime/legacy/20230801221728_generated/migration.sql 
b/examples/migrations/prisma-runtime/legacy/20230801221728_generated/migration.sql similarity index 100% rename from website/migrations/prisma-runtime/legacy/20230801221728_generated/migration.sql rename to examples/migrations/prisma-runtime/legacy/20230801221728_generated/migration.sql diff --git a/website/migrations/prisma-runtime/legacy/migration_lock.toml b/examples/migrations/prisma-runtime/legacy/migration_lock.toml similarity index 100% rename from website/migrations/prisma-runtime/legacy/migration_lock.toml rename to examples/migrations/prisma-runtime/legacy/migration_lock.toml diff --git a/examples/migrations/quick-start-project/database/20240604130842_generated/migration.sql b/examples/migrations/quick-start-project/database/20240604130842_generated/migration.sql new file mode 100644 index 0000000000..c4fb8de063 --- /dev/null +++ b/examples/migrations/quick-start-project/database/20240604130842_generated/migration.sql @@ -0,0 +1,8 @@ +-- CreateTable +CREATE TABLE "message" ( + "id" SERIAL NOT NULL, + "title" TEXT NOT NULL, + "body" TEXT NOT NULL, + + CONSTRAINT "message_pkey" PRIMARY KEY ("id") +); diff --git a/examples/migrations/quick-start-project/database/migration_lock.toml b/examples/migrations/quick-start-project/database/migration_lock.toml new file mode 100644 index 0000000000..fbffa92c2b --- /dev/null +++ b/examples/migrations/quick-start-project/database/migration_lock.toml @@ -0,0 +1,3 @@ +# Please do not edit this file manually +# It should be added in your version-control system (i.e. Git) +provider = "postgresql" \ No newline at end of file diff --git a/examples/migrations/roadmap-policies/db/20240531152240_generated/migration.sql b/examples/migrations/roadmap-execute/db/20231115102057_generated/migration.sql similarity index 100% rename from examples/migrations/roadmap-policies/db/20240531152240_generated/migration.sql rename to examples/migrations/roadmap-execute/db/20231115102057_generated/migration.sql diff --git a/website/migrations/roadmap-execute/db/migration_lock.toml b/examples/migrations/roadmap-execute/db/migration_lock.toml similarity index 100% rename from website/migrations/roadmap-execute/db/migration_lock.toml rename to examples/migrations/roadmap-execute/db/migration_lock.toml diff --git a/website/migrations/roadmap-execute/db/20231115102057_generated/migration.sql b/examples/migrations/roadmap-func/db/20231115133338_generated/migration.sql similarity index 100% rename from website/migrations/roadmap-execute/db/20231115102057_generated/migration.sql rename to examples/migrations/roadmap-func/db/20231115133338_generated/migration.sql diff --git a/website/migrations/roadmap-func/db/migration_lock.toml b/examples/migrations/roadmap-func/db/migration_lock.toml similarity index 100% rename from website/migrations/roadmap-func/db/migration_lock.toml rename to examples/migrations/roadmap-func/db/migration_lock.toml diff --git a/website/migrations/roadmap-func/db/20231115133338_generated/migration.sql b/examples/migrations/roadmap-policies/db/20231115102130_generated/migration.sql similarity index 100% rename from website/migrations/roadmap-func/db/20231115133338_generated/migration.sql rename to examples/migrations/roadmap-policies/db/20231115102130_generated/migration.sql diff --git a/examples/migrations/roadmap-policies/db/migration_lock.toml b/examples/migrations/roadmap-policies/db/migration_lock.toml index fbffa92c2b..99e4f20090 100644 --- a/examples/migrations/roadmap-policies/db/migration_lock.toml +++ 
b/examples/migrations/roadmap-policies/db/migration_lock.toml @@ -1,3 +1,3 @@ # Please do not edit this file manually # It should be added in your version-control system (i.e. Git) -provider = "postgresql" \ No newline at end of file +provider = "postgresql" diff --git a/website/migrations/roadmap-policies/db/20231115102130_generated/migration.sql b/examples/migrations/roadmap-prisma/db/20231115102159_generated/migration.sql similarity index 100% rename from website/migrations/roadmap-policies/db/20231115102130_generated/migration.sql rename to examples/migrations/roadmap-prisma/db/20231115102159_generated/migration.sql diff --git a/website/migrations/roadmap-policies/db/migration_lock.toml b/examples/migrations/roadmap-prisma/db/migration_lock.toml similarity index 100% rename from website/migrations/roadmap-policies/db/migration_lock.toml rename to examples/migrations/roadmap-prisma/db/migration_lock.toml diff --git a/website/migrations/roadmap-prisma/db/20231115102159_generated/migration.sql b/examples/migrations/roadmap-reduce/db/20231115102319_generated/migration.sql similarity index 100% rename from website/migrations/roadmap-prisma/db/20231115102159_generated/migration.sql rename to examples/migrations/roadmap-reduce/db/20231115102319_generated/migration.sql diff --git a/website/migrations/roadmap-prisma/db/migration_lock.toml b/examples/migrations/roadmap-reduce/db/migration_lock.toml similarity index 100% rename from website/migrations/roadmap-prisma/db/migration_lock.toml rename to examples/migrations/roadmap-reduce/db/migration_lock.toml diff --git a/website/migrations/roadmap-reduce/db/20231115102319_generated/migration.sql b/examples/migrations/roadmap-rest/db/20231115110552_generated/migration.sql similarity index 100% rename from website/migrations/roadmap-reduce/db/20231115102319_generated/migration.sql rename to examples/migrations/roadmap-rest/db/20231115110552_generated/migration.sql diff --git a/website/migrations/roadmap-reduce/db/migration_lock.toml b/examples/migrations/roadmap-rest/db/migration_lock.toml similarity index 100% rename from website/migrations/roadmap-reduce/db/migration_lock.toml rename to examples/migrations/roadmap-rest/db/migration_lock.toml diff --git a/examples/typegraphs/basic.ts b/examples/typegraphs/basic.ts index edcefc8afe..6e124f7846 100644 --- a/examples/typegraphs/basic.ts +++ b/examples/typegraphs/basic.ts @@ -5,24 +5,30 @@ import { DenoRuntime } from "@typegraph/sdk/runtimes/deno.js"; // skip:end -await typegraph({ - name: "basic-authentication", - // skip:next-line - cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, -}, (g) => { - const deno = new DenoRuntime(); - const pub = Policy.public(); +await typegraph( + { + name: "basic-authentication", + // skip:next-line + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + const deno = new DenoRuntime(); + const pub = Policy.public(); - const ctx = t.struct({ - "username": t.string().optional(), - }); + const ctx = t.struct({ + username: t.string().optional(), + }); - // highlight-next-line - g.auth(Auth.basic(["admin"])); + // highlight-next-line + g.auth(Auth.basic(["admin"])); - g.expose({ - get_context: deno.identity(ctx).apply({ - username: g.fromContext("username"), - }).withPolicy(pub), - }); -}); + g.expose({ + get_context: deno + .identity(ctx) + .apply({ + username: g.fromContext("username"), + }) + .withPolicy(pub), + }); + }, +); diff --git a/examples/typegraphs/http-runtime.py b/examples/typegraphs/http-runtime.py 
index 2d8841a517..29034017e5 100644 --- a/examples/typegraphs/http-runtime.py +++ b/examples/typegraphs/http-runtime.py @@ -9,7 +9,7 @@ # skip:next-line cors=Cors(allow_origin=["https://metatype.dev", "http://localhost:3000"]), ) -def http_example(g: Graph): +def http_runtime(g: Graph): pub = Policy.public() facts = HttpRuntime("https://uselessfacts.jsph.pl/api/v2/facts") diff --git a/examples/typegraphs/http-runtime.ts b/examples/typegraphs/http-runtime.ts new file mode 100644 index 0000000000..2009762436 --- /dev/null +++ b/examples/typegraphs/http-runtime.ts @@ -0,0 +1,50 @@ +// skip:start +import { Policy, t, typegraph } from "@typegraph/sdk/index.js"; + +// isort: off +// skip:end +// highlight-next-line +import { HttpRuntime } from "@typegraph/sdk/runtimes/http.js"; + +await typegraph( + { + name: "http-runtime", + // skip:next-line + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + // highlight-next-line + const facts = new HttpRuntime("https://uselessfacts.jsph.pl/api/v2/facts"); + const pub = Policy.public(); + + g.expose( + { + facts: facts.get( + t.struct({ + language: t.enum_(["en", "de"]), + }), + t.struct({ + id: t.string(), + text: t.string(), + source: t.string(), + source_url: t.string(), + language: t.string(), + permalink: t.string(), + }), + { + path: "/random", + }, + ), + facts_as_text: facts.get( + t.struct({ + header_accept: t.string().set("text/plain"), + language: t.enum_(["en", "de"]), + }), + t.string(), + { path: "/random", headerPrefix: "header_" }, + ), + }, + pub, + ); + }, +); diff --git a/examples/typegraphs/jwt.ts b/examples/typegraphs/jwt.ts index 2b0be121e7..66c2d8b754 100644 --- a/examples/typegraphs/jwt.ts +++ b/examples/typegraphs/jwt.ts @@ -5,23 +5,29 @@ import { DenoRuntime } from "@typegraph/sdk/runtimes/deno.js"; // skip:end -typegraph({ - name: "jwt-authentication", - // skip:next-line - cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, -}, (g) => { - const deno = new DenoRuntime(); - const pub = Policy.public(); +typegraph( + { + name: "jwt-authentication", + // skip:next-line + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + const deno = new DenoRuntime(); + const pub = Policy.public(); - const ctx = t.struct({ - "your_own_content": t.string().optional(), - }); - // highlight-next-line - g.auth(Auth.hmac256("custom")); + const ctx = t.struct({ + your_own_content: t.string().optional(), + }); + // highlight-next-line + g.auth(Auth.hmac256("custom")); - g.expose({ - get_context: deno.identity(ctx).apply({ - your_own_content: g.fromContext("your_own_content"), - }), - }, pub); -}); + g.expose( + { + get_context: deno.identity(ctx).apply({ + your_own_content: g.fromContext("your_own_content"), + }), + }, + pub, + ); + }, +); diff --git a/examples/typegraphs/math.py b/examples/typegraphs/math.py index 90cfd5d70a..728180cc0d 100644 --- a/examples/typegraphs/math.py +++ b/examples/typegraphs/math.py @@ -27,7 +27,7 @@ def math(g: Graph): # the policy implementation is based on functions as well restrict_referer = deno.policy( "restrict_referer_policy", - '(_, context) => context.headers.referer && ["localhost", "metatype"].includes(new URL(context.headers.referer).hostname)', + '(_, context) => context.headers.referer && ["localhost", "metatype.dev"].includes(new URL(context.headers.referer).hostname)', ) g.expose( diff --git a/examples/typegraphs/math.ts b/examples/typegraphs/math.ts index daf9818b81..ccd3f97004 100644 --- 
a/examples/typegraphs/math.ts +++ b/examples/typegraphs/math.ts @@ -3,50 +3,58 @@ import { Policy, t, typegraph } from "@typegraph/sdk/index.js"; // skip:end import { DenoRuntime } from "@typegraph/sdk/runtimes/deno.js"; -typegraph({ - name: "math", - // skip:start - rate: { windowLimit: 2000, windowSec: 60, queryLimit: 200, localExcess: 0 }, - cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, - // skip:end -}, (g) => { - const pub = Policy.public(); - - // we need a runtime to run the functions on - const deno = new DenoRuntime(); - - // we can provide the function code inline - const random_item_fn = - "({ items }) => items[Math.floor(Math.random() * items.length)]"; - - // the policy implementation is based on functions itself - const restrict_referer = deno.policy( - "restrict_referer_policy", - '(_, context) => context.headers.referer && ["localhost", "metatype"].includes(new URL(context.headers.referer).hostname)', - ); - - // or we can point to a local file that's accessible to the meta-cli - const fib_module = "scripts/fib.ts"; - - g.expose({ - // all materializers have inputs and outputs - fib: deno.import( - t.struct({ "size": t.integer() }), - t.list(t.float()), +console.log("debug: debug test"); + +await typegraph( + { + name: "math", + // skip:start + rate: { windowLimit: 2000, windowSec: 60, queryLimit: 200, localExcess: 0 }, + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + // skip:end + }, + (g) => { + const pub = Policy.public(); + + // we need a runtime to run the functions on + const deno = new DenoRuntime(); + + // we can provide the function code inline + const random_item_fn = + "({ items }) => items[Math.floor(Math.random() * items.length)]"; + + // the policy implementation is based on functions itself + const restrict_referer = deno.policy( + "restrict_referer_policy", + '(_, context) => context.headers.referer && ["localhost", "metatype.dev"].includes(new URL(context.headers.referer).hostname)', + ); + + // or we can point to a local file that's accessible to the meta-cli + const fib_module = "scripts/fib.ts"; + + g.expose( { - module: fib_module, - name: "default", // name the exported function to run + // all materializers have inputs and outputs + fib: deno + .import(t.struct({ size: t.integer() }), t.list(t.float()), { + module: fib_module, + name: "default", // name the exported function to run + }) + .withPolicy(restrict_referer), + randomItem: deno.func( + t.struct({ items: t.list(t.string()) }), + t.string(), + { code: random_item_fn }, + ), + random: deno.func( + t.struct({}), + t.float(), + { code: "() => Math.random()" }, // more inline code + ), }, - ).withPolicy(restrict_referer), - randomItem: deno.func( - t.struct({ "items": t.list(t.string()) }), - t.string(), - { code: random_item_fn }, - ), - random: deno.func( - t.struct({}), - t.float(), - { code: "() => Math.random()" }, // more inline code - ), - }, pub); -}); + pub, + ); + }, +); + +console.log("debug: end of math.ts"); diff --git a/examples/typegraphs/oauth2.ts b/examples/typegraphs/oauth2.ts index cf98a9a72a..0ffcdf0177 100644 --- a/examples/typegraphs/oauth2.ts +++ b/examples/typegraphs/oauth2.ts @@ -5,25 +5,29 @@ import { DenoRuntime } from "@typegraph/sdk/runtimes/deno.js"; // skip:end -typegraph({ - name: "oauth2-authentication", - // skip:next-line - cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, -}, (g) => { - const deno = new DenoRuntime(); - const pub = Policy.public(); +typegraph( + { + name: 
"oauth2-authentication", + // skip:next-line + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + const deno = new DenoRuntime(); + const pub = Policy.public(); - const ctx = t.struct({ "exp": t.integer().optional() }); + const ctx = t.struct({ exp: t.integer().optional() }); - // highlight-start - g.auth( - Auth.oauth2Github("openid profile email"), - ); - // highlight-end + // highlight-start + g.auth(Auth.oauth2Github("openid profile email")); + // highlight-end - g.expose({ - get_context: deno.identity(ctx).apply({ - exp: g.fromContext("exp"), - }), - }, pub); -}); + g.expose( + { + get_context: deno.identity(ctx).apply({ + exp: g.fromContext("exp"), + }), + }, + pub, + ); + }, +); diff --git a/examples/typegraphs/quick-start-project.py b/examples/typegraphs/quick-start-project.py index 3ef479245a..eb359e486e 100644 --- a/examples/typegraphs/quick-start-project.py +++ b/examples/typegraphs/quick-start-project.py @@ -5,7 +5,7 @@ @typegraph() -def example(g: Graph): +def quick_start_project(g: Graph): # access control public = Policy.public() diff --git a/examples/typegraphs/quick-start-project.ts b/examples/typegraphs/quick-start-project.ts index c3659fdfb3..485f03917d 100644 --- a/examples/typegraphs/quick-start-project.ts +++ b/examples/typegraphs/quick-start-project.ts @@ -3,39 +3,45 @@ import { DenoRuntime } from "@typegraph/sdk/runtimes/deno.js"; import { PythonRuntime } from "@typegraph/sdk/runtimes/python.js"; import { PrismaRuntime } from "@typegraph/sdk/providers/prisma.js"; -typegraph("example", (g) => { - // access control - const pub = Policy.public(); +typegraph( + { + name: "quick-start-project", + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + // access control + const pub = Policy.public(); - // runtimes - const deno = new DenoRuntime(); - const python = new PythonRuntime(); - const db = new PrismaRuntime("database", "POSTGRES"); + // runtimes + const deno = new DenoRuntime(); + const python = new PythonRuntime(); + const db = new PrismaRuntime("database", "POSTGRES"); - // database tables - const message = t.struct( - { - "id": t.integer({}, { asId: true, config: { auto: true } }), // configuring our primary key - "title": t.string(), - "body": t.string(), - }, - { name: "message" }, // the name of our type - ); + // database tables + const message = t.struct( + { + id: t.integer({}, { asId: true, config: { auto: true } }), // configuring our primary key + title: t.string(), + body: t.string(), + }, + { name: "message" }, // the name of our type + ); - g.expose({ - add: python - .fromLambda( - t.struct({ first: t.float(), second: t.float() }), - t.float(), - { code: "lambda x: x['first'] + x['second']" }, - ) - .withPolicy(pub), - multiply: deno - .func(t.struct({ first: t.float(), second: t.float() }), t.float(), { - code: "({first, second}) => first * second", - }) - .withPolicy(pub), - create_message: db.create(message).withPolicy(pub), - list_messages: db.findMany(message).withPolicy(pub), - }); -}); + g.expose({ + add: python + .fromLambda( + t.struct({ first: t.float(), second: t.float() }), + t.float(), + { code: "lambda x: x['first'] + x['second']" }, + ) + .withPolicy(pub), + multiply: deno + .func(t.struct({ first: t.float(), second: t.float() }), t.float(), { + code: "({first, second}) => first * second", + }) + .withPolicy(pub), + create_message: db.create(message).withPolicy(pub), + list_messages: db.findMany(message).withPolicy(pub), + }); + }, +); diff --git 
a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index f853813e4d..9165ee1ea8 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -5,7 +5,7 @@ use self::actors::task::deploy::{DeployAction, DeployActionGenerator}; use self::actors::task::TaskConfig; use self::actors::task_manager::{self, StopReason, TaskReason}; use super::{Action, ConfigArgs, NodeArgs}; -use crate::com::store::{Command, Endpoint, MigrationAction, ServerStore}; +use crate::com::store::{Command, Endpoint, ServerStore}; use crate::config::Config; use crate::deploy::actors; use crate::deploy::actors::console::ConsoleActor; @@ -16,6 +16,7 @@ use crate::interlude::*; use crate::secrets::{RawSecrets, Secrets}; use actix_web::dev::ServerHandle; use clap::Parser; +use common::node::Node; use futures::channel::oneshot; use owo_colors::OwoColorize; @@ -92,6 +93,7 @@ pub struct DeployOptions { #[derive(Debug)] pub struct Deploy { config: Arc, + node: Node, base_dir: Arc, options: DeployOptions, secrets: RawSecrets, @@ -121,10 +123,10 @@ impl Deploy { .context("error while building node from config")?; ServerStore::with(Some(Command::Deploy), Some(config.as_ref().to_owned())); - ServerStore::set_migration_action_glob(MigrationAction { - create: deploy.options.create_migration, - reset: deploy.options.allow_destructive, // reset on drift - }); + // ServerStore::set_migration_action_glob(MigrationAction { + // create: deploy.options.create_migration, + // reset: deploy.options.allow_destructive, // reset on drift + // }); ServerStore::set_endpoint(Endpoint { typegate: node.base_url.clone().into(), auth: node.auth.clone(), @@ -146,6 +148,7 @@ impl Deploy { } Ok(Self { config, + node, base_dir: dir.clone(), options, secrets, @@ -218,7 +221,9 @@ enum ExitStatus { mod default_mode { //! 
non-watch mode - use crate::cli::deploy::default_mode::actors::task::TaskFinishStatus; + use crate::{cli::deploy::default_mode::actors::task::TaskFinishStatus, config::PathOption}; + + use self::actors::task::deploy::MigrationAction; use super::*; @@ -237,14 +242,27 @@ mod default_mode { let mut secrets = deploy.secrets.clone(); secrets.apply_overrides(&deploy.options.secrets)?; - ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); + // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); let (report_tx, report_rx) = oneshot::channel(); let task_config = TaskConfig::init(deploy.base_dir.clone()); - let action_generator = DeployActionGenerator::new(task_config); + let action_generator = DeployActionGenerator { + task_config: task_config.into(), + node: deploy.node.clone().into(), + secrets: secrets.hydrate(deploy.base_dir.clone()).await?.into(), + migrations_dir: deploy + .config + .prisma_migrations_base_dir(PathOption::Absolute) + .into(), + default_migration_action: MigrationAction { + apply: true, + create: deploy.options.create_migration, + reset: deploy.options.allow_destructive, + }, + }; let task_manager = TaskManager::new( deploy.config.clone(), @@ -395,6 +413,10 @@ mod default_mode { } mod watch_mode { + use crate::config::PathOption; + + use self::actors::task::deploy::MigrationAction; + use super::*; #[tracing::instrument] @@ -418,13 +440,26 @@ mod watch_mode { .context("setting Ctrl-C handler")?; let task_config = TaskConfig::init(deploy.base_dir.clone()); - let action_generator = DeployActionGenerator::new(task_config); + let mut secrets = deploy.secrets.clone(); + secrets.apply_overrides(&deploy.options.secrets)?; + + let action_generator = DeployActionGenerator { + task_config: task_config.into(), + node: deploy.node.into(), + secrets: secrets.hydrate(deploy.base_dir.clone()).await?.into(), + migrations_dir: deploy + .config + .prisma_migrations_base_dir(PathOption::Absolute) + .into(), + default_migration_action: MigrationAction { + apply: true, + create: deploy.options.create_migration, + reset: deploy.options.allow_destructive, + }, + }; loop { - let mut secrets = deploy.secrets.clone(); - secrets.apply_overrides(&deploy.options.secrets)?; - - ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); + // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index e6263ccdbe..ebec4a304d 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -1,3 +1,9 @@ +use crate::deploy::actors::task::serialize::{ + SerializeAction, SerializeActionGenerator, SerializeError, +}; +use crate::deploy::actors::task::{TaskConfig, TaskFinishStatus}; +use crate::deploy::actors::task_manager::message::AddTask; +use crate::deploy::actors::task_manager::{TaskManager, TaskReason}; // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 use crate::interlude::*; @@ -12,6 +18,7 @@ use actix_web::dev::ServerHandle; use clap::Parser; use common::typegraph::Typegraph; use core::fmt::Debug; +use futures::channel::oneshot; use std::io::{self, Write}; use tokio::io::AsyncWriteExt; use tokio::sync::mpsc; @@ -54,7 +61,7 @@ impl Action for Serialize { #[tracing::instrument] async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()> { let dir = args.dir(); - let config_path = args.config; + let config_path = args.config.clone(); // config file is not used when `TypeGraph` files // are provided in the CLI by flags @@ -72,49 +79,55 @@ impl Action for Serialize { let console = ConsoleActor::new(Arc::clone(&config)).start(); - let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); + let (report_tx, report_rx) = oneshot::channel(); - let loader = LoaderActor::new( - Arc::clone(&config), - console.clone(), - loader_event_tx, - self.max_parallel_loads.unwrap_or_else(num_cpus::get), - ) - .auto_stop() - .start(); + let action_generator = SerializeActionGenerator::new(TaskConfig::init(args.dir().into())); + // TODO fail_fast + let task_manager: Addr> = + TaskManager::new(config.clone(), action_generator, 1, report_tx, console) + .auto_stop() + .start(); if self.files.is_empty() { bail!("no file provided"); } for path in self.files.iter() { - use normpath::PathExt; - let path = dir.join(path).normalize()?.into_path_buf(); - if let Err(err) = crate::config::ModuleType::try_from(path.as_path()) { - bail!("file is not a valid module type: {err:#}") - } - loader.do_send(LoadModule(path.into())); + task_manager.do_send(AddTask { + path: path.as_path().into(), + reason: TaskReason::Discovery, + }); } - let mut loaded: Vec = vec![]; - let mut event_rx = loader_event_rx; - while let Some(event) = event_rx.recv().await { - match event { - LoaderEvent::Typegraph(tg_infos) => { - let tgs = ServerStore::get_responses_or_fail(&tg_infos.path)?; - for (_, tg) in tgs.iter() { - loaded.push(tg.as_typegraph()?); - } + let report = report_rx.await?; + // TODO no need to report errors + let tgs = report + .entries + .into_iter() + .map(|entry| match entry.status { + TaskFinishStatus::Finished(results) => results + .into_iter() + .collect::, SerializeError>>() + .map_err(|e| { + eyre::eyre!( + "serialization failed for typegraph '{}' at {:?}: {}", + e.typegraph, + entry.path, + e.error + ) + }), + TaskFinishStatus::Cancelled => { + Err(eyre::eyre!("serialization cancelled for {:?}", entry.path)) } - LoaderEvent::Stopped(res) => { - if let StopBehavior::ExitFailure(err) = res { - bail!("LoaderActor exit failure {err}"); - } + TaskFinishStatus::Error => { + Err(eyre::eyre!("serialization failed for {:?}", entry.path)) } - } - } + }) + .collect::>>()? + .into_iter() + .flatten() + .collect::>(); - let tgs = loaded; if let Some(tg_name) = self.typegraph.as_ref() { if let Some(tg) = tgs.iter().find(|tg| &tg.name().unwrap() == tg_name) { self.write(&self.to_string(&tg)?).await?; diff --git a/meta-cli/src/config.rs b/meta-cli/src/config.rs index 5aec1c1f31..29d6263920 100644 --- a/meta-cli/src/config.rs +++ b/meta-cli/src/config.rs @@ -27,6 +27,13 @@ lazy_static! 
{ static ref DEFAULT_LOADER_CONFIG: TypegraphLoaderConfig = Default::default(); } +const DEFAULT_PRISMA_MIGRATIONS_PATH: &str = "prisma-migrations"; + +pub enum PathOption { + Absolute, + Relative, +} + #[derive(Deserialize, Debug, Clone, Default)] #[serde(untagged)] pub enum Lift { @@ -254,6 +261,21 @@ impl Config { .unwrap_or(&DEFAULT_LOADER_CONFIG) } + pub fn prisma_migrations_base_dir(&self, opt: PathOption) -> PathBuf { + let path = self + .typegraphs + .materializers + .prisma + .migrations_path + .as_deref() + .unwrap_or_else(|| Path::new(DEFAULT_PRISMA_MIGRATIONS_PATH)); + + match opt { + PathOption::Absolute => self.base_dir.join(path), + PathOption::Relative => path.to_path_buf(), + } + } + /// `config migration dir` + `runtime` + `tg_name` pub fn prisma_migrations_dir_rel(&self, tg_name: &str) -> PathBuf { let mut path = self diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index f0493cd43c..2f867b7cfc 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -8,8 +8,8 @@ //! Note: On the task process //! - stdout is used for logging and task output; each line is prefix by //! either one of "debug: ", "info: ", "warn: ", "error: " for logging, -//! or "output: " for JSON-serialized outputs like serialized typegraph -//! or deployment report. +//! or "success: "/"failure: " for reporting operation result (serialization, or +//! deployment) for each typegraph with for JSON-serialized data. //! - stderr is used for fatal errors that causes the program to exit; mainly //! unhandled exception in JavaScript or Python //! @@ -28,10 +28,12 @@ use crate::config::Config; use crate::interlude::*; use color_eyre::owo_colors::OwoColorize; use common::typegraph::Typegraph; +use futures::lock::Mutex; use process_wrap::tokio::TokioChildWrapper; +use serde::Deserialize; use std::time::Duration; -use tokio::io::{AsyncBufReadExt, BufReader, Lines}; -use tokio::process::{ChildStdout, Command}; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader, Lines}; +use tokio::process::{ChildStdin, ChildStdout, Command}; pub mod message { use super::*; @@ -62,6 +64,14 @@ pub mod message { #[derive(Message)] #[rtype(result = "()")] pub struct Stop; + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct Rpc(pub RpcRequest); + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct SendRpcResponse(pub RpcResponse); } use message::*; @@ -103,10 +113,13 @@ pub struct TaskActor { config: Arc, action: A, process: Option>, + // TODO separate i/o actor, and write queue instead of mutex + process_stdin: Option>>, task_manager: Addr>, console: Addr, collected_output: Vec>, timeout_duration: Duration, + followup_task: A::Followup, } impl TaskActor @@ -122,6 +135,7 @@ where Self { config, process: None, + process_stdin: None, task_manager, console, action, @@ -139,6 +153,7 @@ where }) .unwrap_or(DEFAULT_TIMEOUT), ), + followup_task: Default::default(), } } @@ -206,6 +221,7 @@ impl Handler for TaskActor { ctx.address().do_send(ProcessOutput { stdout }); + self.process_stdin = Some(Arc::new(Mutex::new(child.stdin().take().unwrap()))); self.process = Some(child); let addr = ctx.address(); @@ -318,6 +334,45 @@ enum OutputLevel { Error, } +#[derive(Deserialize, Debug)] +#[serde(tag = "method", content = "params")] +#[serde(rename_all = "camelCase")] +enum RpcCall { + QueryGlobalConfig, + QueryTypegraphConfig { typegraph: String }, +} + +#[derive(Serialize, Deserialize, Debug)] +enum JsonRpcVersion { + #[serde(rename = "2.0")] + 
V2, +} + +#[derive(Deserialize, Debug)] +struct RpcRequest { + jsonrpc: JsonRpcVersion, + id: u32, + #[serde(flatten)] + call: RpcCall, +} + +impl RpcRequest { + fn response(&self, result: serde_json::Value) -> RpcResponse { + RpcResponse { + jsonrpc: JsonRpcVersion::V2, + id: self.id, + result, + } + } +} + +#[derive(Serialize, Debug)] +struct RpcResponse { + jsonrpc: JsonRpcVersion, + id: u32, + result: serde_json::Value, +} + impl TaskActor { async fn loop_output_lines( mut reader: Lines>, @@ -367,6 +422,12 @@ impl TaskActor { continue; } + if let Some(req) = line.strip_prefix("jsonrpc: ") { + let req: RpcRequest = serde_json::from_str(req)?; + addr.do_send(message::Rpc(req)); + continue; + } + match latest_level { OutputLevel::Debug => { console.debug(format!("{scope}>{line}")); @@ -430,3 +491,91 @@ impl Handler for TaskActor { } } } + +impl Handler for TaskActor { + type Result = (); + + fn handle(&mut self, Rpc(req): Rpc, ctx: &mut Context) -> Self::Result { + let addr = ctx.address(); + match &req.call { + RpcCall::QueryGlobalConfig => { + let config = self.action.get_global_config(); + let response = req.response(config); + self.send_rpc_response(response, ctx); + // addr.do_send(SendRpcResponse(response)); + } + RpcCall::QueryTypegraphConfig { typegraph } => { + let config = self.action.get_typegraph_config(typegraph); + let response = req.response(config); + self.send_rpc_response(response, ctx); + // addr.do_send(SendRpcResponse(response)); + } + } + } +} + +impl TaskActor { + fn send_rpc_response(&mut self, response: RpcResponse, ctx: &mut Context) { + let response_id = response.id; + match serde_json::to_string(&response) { + Ok(response) => { + let stdin = self.process_stdin.clone().unwrap(); + let console = self.console.clone(); + let fut = async move { + let mut stdin = stdin.lock().await; + console.debug(format!("sending rpc response #{response_id}")); + stdin + .write_all(response.as_bytes()) + .await + .expect("could not write rpc response to process stdin"); + stdin + .write_all(b"\n") + .await + .expect("could not write newline to process stdin"); + }; + + ctx.spawn(fut.in_current_span().into_actor(self)); + } + Err(e) => { + self.console + .error(format!("could not serialize rpc response {e}")); + } + } + } +} + +// impl Handler for TaskActor { +// type Result = (); +// +// fn handle( +// &mut self, +// SendRpcResponse(response): SendRpcResponse, +// _ctx: &mut Context, +// ) -> Self::Result { +// { +// let response_id = response.id; +// match serde_json::to_string(&response) { +// Ok(response) => { +// let stdin = self.process_stdin.clone().unwrap_or_log(); +// let console = self.console.clone(); +// let fut = async move { +// let stdin = stdin.lock().await; +// console.debug(format!("sending rpc response #{response_id}")); +// stdin +// .write_all(response.as_bytes()) +// .await +// .expect("could not write rpc response to process stdin"); +// stdin +// .write_all(b"\n") +// .await +// .expect("could not write newline to process stdin"); +// }; +// } +// Err(e) => { +// self.console +// .error(format!("could not serialize rpc response {e}")); +// } +// } +// }; +// } +// } diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index 5703b0da6f..cf2aadfdd5 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -11,7 +11,12 @@ use tokio::{process::Command, sync::OwnedSemaphorePermit}; pub trait TaskActionGenerator: Clone { type Action: TaskAction; - fn generate(&self, path: 
Arc, permit: OwnedSemaphorePermit) -> Self::Action; + fn generate( + &self, + path: Arc, + followup: Option<::Followup>, + permit: OwnedSemaphorePermit, + ) -> Self::Action; } pub struct ActionFinalizeContext { @@ -25,9 +30,14 @@ pub trait OutputData: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Se fn get_typegraph_name(&self) -> String; } +pub trait FollowupTaskConfig { + fn schedule(&self, task_manager: Addr>); +} + pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { type SuccessData: OutputData; type FailureData: OutputData; + type Followup: FollowupTaskConfig + Default + std::fmt::Debug + Unpin + Send; type Generator: TaskActionGenerator + Unpin; async fn get_command(&self) -> Result; @@ -37,6 +47,9 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { fn get_start_message(&self) -> String; fn get_error_message(&self, err: &str) -> String; + fn get_global_config(&self) -> serde_json::Value; + fn get_typegraph_config(&self, typegraph: &str) -> serde_json::Value; + fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext); } diff --git a/meta-cli/src/deploy/actors/task/command.rs b/meta-cli/src/deploy/actors/task/command.rs index d9ec0e4f0d..eb6e643fdd 100644 --- a/meta-cli/src/deploy/actors/task/command.rs +++ b/meta-cli/src/deploy/actors/task/command.rs @@ -41,7 +41,8 @@ impl CommandBuilder { "MCLI_SERVER_PORT", self.task_config.instance_port.to_string(), ) - .env("MCLI_TASK_ACTION", self.action_env) + .env("MCLI_ACTION", self.action_env) + .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()); diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index 75ab2a879b..aa6a8623b1 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -1,57 +1,126 @@ +mod migration_resolution; +mod migrations; + use super::action::{ - ActionFinalizeContext, ActionResult, OutputData, TaskAction, TaskActionGenerator, + ActionFinalizeContext, ActionResult, FollowupTaskConfig, OutputData, TaskAction, + TaskActionGenerator, }; use super::command::CommandBuilder; use super::TaskConfig; use crate::deploy::actors::console::Console; -use crate::deploy::push::pusher::{MessageEntry, Migrations}; +use crate::deploy::actors::task_manager::TaskManager; use crate::interlude::*; +use crate::secrets::Secrets; use color_eyre::owo_colors::OwoColorize; +use common::node::Node; use serde::Deserialize; use std::{path::Path, sync::Arc}; use tokio::{process::Command, sync::OwnedSemaphorePermit}; pub type DeployAction = Arc; +#[derive(Clone, Debug, Default, Serialize)] +pub struct MigrationAction { + pub apply: bool, // apply existing migrations + pub create: bool, // create new migrations + pub reset: bool, // reset database if necessary +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct PrismaRuntimeId { + pub typegraph: String, + pub name: String, +} + #[derive(Debug)] pub struct DeployActionInner { path: Arc, task_config: Arc, + node: Arc, + secrets: Arc, + migrations_dir: Arc, + migration_actions: HashMap, + default_migration_action: MigrationAction, #[allow(unused)] permit: OwnedSemaphorePermit, } #[derive(Clone)] pub struct DeployActionGenerator { - task_config: Arc, -} - -impl DeployActionGenerator { - pub fn new(task_config: TaskConfig) -> Self { - Self { - task_config: Arc::new(task_config), - } - } + pub task_config: Arc, + pub node: Arc, + pub secrets: Arc, + pub migrations_dir: Arc, + pub default_migration_action: MigrationAction, } impl TaskActionGenerator for 
DeployActionGenerator { type Action = DeployAction; - fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + fn generate( + &self, + path: Arc, + followup: Option, + permit: OwnedSemaphorePermit, + ) -> Self::Action { + let (default_migration_action, migration_actions) = if let Some(followup) = followup { + ( + Default::default(), + followup + .migrations + .into_iter() + .map(|(runtime, action_override)| { + ( + runtime, + MigrationAction { + reset: matches!( + action_override, + MigrationActionOverride::ResetDatabase + ), + ..Default::default() + }, + ) + }) + .collect(), + ) + } else { + (self.default_migration_action.clone(), HashMap::new()) + }; + DeployActionInner { path, task_config: self.task_config.clone(), + node: self.node.clone(), + secrets: self.secrets.clone(), + migrations_dir: self.migrations_dir.clone(), + migration_actions, + default_migration_action, permit, } .into() } } +#[derive(Deserialize, Debug)] +#[serde(rename_all = "snake_case", tag = "type", content = "text")] +pub enum MessageEntry { + Info(String), + Warning(String), + Error(String), +} + +#[derive(Deserialize, Debug)] +pub struct Migration { + pub runtime: String, + #[serde(rename = "migrations")] + pub archive: String, +} + #[derive(Deserialize, Debug)] pub struct DeploySuccess { pub typegraph: String, pub messages: Vec, - pub migrations: Vec, + pub migrations: Vec, pub failure: Option, } @@ -75,10 +144,21 @@ impl OutputData for DeployError { } } +#[derive(Clone, Debug)] +pub enum MigrationActionOverride { + ResetDatabase, +} + +#[derive(Debug, Default)] +pub struct FollowupDeployConfig { + pub migrations: Vec<(PrismaRuntimeId, MigrationActionOverride)>, +} + impl TaskAction for DeployAction { type SuccessData = DeploySuccess; type FailureData = DeployError; type Generator = DeployActionGenerator; + type Followup = FollowupDeployConfig; async fn get_command(&self) -> Result { CommandBuilder { @@ -129,30 +209,18 @@ impl TaskAction for DeployAction { let tg_name = data.get_typegraph_name(); - let migdir = ctx.config.prisma_migration_dir_abs(&data.typegraph); - for migrations in data.migrations.iter() { - let dest = migdir.join(&migrations.runtime); - if let Err(err) = - common::archive::unpack(&dest, Some(migrations.migrations.clone())) - { + self.unpack_migrations(&tg_name, &data.migrations, &ctx, &scope); + + match &data.failure { + Some(failure) => { ctx.console.error(format!( - "error while unpacking migrations into {:?}", - migdir - )); - ctx.console.error(format!("{err:?}")); - } else { - ctx.console.info(format!( - "{scope} unpacked migrations for {}/{} at {}", - tg_name.cyan(), - migrations.runtime, - dest.display().bold() + "{icon} error while deploying typegraph {name} from {path}", + icon = "✗".red(), + name = tg_name.cyan(), + path = self.path.display().yellow(), )); - } - } - match data.failure { - Some(_) => { - todo!(); + self.handle_push_failure(&tg_name, failure, &ctx, &scope); } None => { ctx.console.info(format!( @@ -176,4 +244,43 @@ impl TaskAction for DeployAction { } } } + + fn get_global_config(&self) -> serde_json::Value { + serde_json::json!({ + "typegate": { + "endpoint": self.node.base_url, + "auth": self.node.auth, + }, + "prefix": self.node.prefix, + }) + } + + fn get_typegraph_config(&self, typegraph: &str) -> serde_json::Value { + let migration_actions = self + .migration_actions + .iter() + .filter_map(|(runtime, action)| { + if runtime.typegraph == typegraph { + Some((runtime.name.clone(), action.clone())) + } else { + None + } + }) + .collect::>(); + 
+ serde_json::json!({ + "secrets": self.secrets.get(typegraph), + "artifactResolution": true, + "migrationActions": migration_actions, + "defaultMigrationAction": self.default_migration_action, + "migrationsDir": self.migrations_dir.to_path_buf().join(typegraph), + }) + } +} + +impl FollowupTaskConfig for FollowupDeployConfig { + fn schedule(&self, task_manager: Addr>) { + todo!(); + // task_manager.do_send(AddFollowupTask) + } } diff --git a/meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs b/meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs new file mode 100644 index 0000000000..139597f9cb --- /dev/null +++ b/meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs @@ -0,0 +1,2 @@ + + diff --git a/meta-cli/src/deploy/actors/task/deploy/migrations.rs b/meta-cli/src/deploy/actors/task/deploy/migrations.rs new file mode 100644 index 0000000000..ccd36a38e6 --- /dev/null +++ b/meta-cli/src/deploy/actors/task/deploy/migrations.rs @@ -0,0 +1,478 @@ +use color_eyre::owo_colors::OwoColorize; + +use super::{DeployAction, DeployActionInner, Migration, MigrationActionOverride, PrismaRuntimeId}; +use crate::deploy::actors::console::Console; +use crate::deploy::actors::console::input::{ConfirmHandler, Confirm, Select}; +use crate::deploy::actors::task::TaskActor; +use crate::deploy::actors::task::action::ActionFinalizeContext; +use crate::interlude::*; + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +#[allow(unused)] +pub struct DatabaseResetRequired { + message: String, + runtime_name: String, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +#[allow(unused)] +pub struct NullConstraintViolation { + message: String, + runtime_name: String, + column: String, + migration_name: String, + is_new_column: bool, + table: String, +} + +#[allow(unused)] +struct ResolveNullConstraintViolation { + failure: NullConstraintViolation, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct GenericPushFailure { + message: String, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(tag = "reason")] +enum PushFailure { + Unknown(GenericPushFailure), + DatabaseResetRequired(DatabaseResetRequired), + NullConstraintViolation(NullConstraintViolation), +} + +impl DeployActionInner { + pub(super) fn unpack_migrations( + &self, + tg_name: &str, + migrations: &[Migration], + ctx: &ActionFinalizeContext>, + scope: &impl std::fmt::Display, + ) { + let migdir = ctx.config.prisma_migration_dir_abs(tg_name); + + for migration in migrations.iter() { + let dest = migdir.join(&migration.runtime); + if let Err(err) = common::archive::unpack(&dest, Some(&migration.archive)) { + ctx.console.error(format!( + "{scope} error while unpacking migrations into {:?}", + migdir + )); + ctx.console.error(format!("{err:?}")); + } else { + ctx.console.info(format!( + "{scope} unpacked migrations for {}/{} at {}", + tg_name.cyan(), + migration.runtime, + dest.display().bold() + )); + } + } + } + + pub(super) fn handle_push_failure( + &self, + tg_name: &str, + failure_raw: &str, + ctx: &ActionFinalizeContext>, + scope: &impl std::fmt::Display, + ) { + let failure = serde_json::from_str::(failure_raw); + match failure { + Ok(PushFailure::Unknown(error)) => { + ctx.console.error(format!( + "{scope} unknown error: {msg}", + scope = scope, + msg = error.message, + )); + } + + Ok(PushFailure::DatabaseResetRequired(error)) => { + ctx.task.do_send(message::ConfirmDatabaseReset { + typegraph: tg_name.to_string(), + runtime: 
error.runtime_name.clone(), + message: error.message.clone(), + }); + } + + Ok(PushFailure::NullConstraintViolation(error)) => { + ctx.task + .do_send(message::ResolveConstraintViolation { + typegraph: tg_name.to_string(), + runtime: error.runtime_name.clone(), + column: error.column.clone(), + migration: error.migration_name.clone(), + is_new_column: error.is_new_column, + table: error.table.clone(), + message: error.message.clone(), + }); + } + + Err(err) => { + ctx.console.error(format!( + "{scope} failed to parse push failure data: {err:?}", + scope = scope, + err = err + )); + } + } + } +} + +pub mod message { + use super::*; + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct ConfirmDatabaseReset { + pub typegraph: String, + pub runtime: String, + pub message: String, + } + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct ResetDatabase { + pub typegraph: String, + pub runtime: String, + } + + #[derive(Message)] + #[rtype(result = "()")] + pub (super) struct ResolveConstraintViolation { + pub typegraph: String, + pub runtime: String, + pub column: String, + pub migration: String, + pub is_new_column: bool, + pub table: String, + pub message: String, + } + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct RemoveLatestMigration { + pub typegraph: String, + pub runtime: String, + pub migration: String, + } + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct WaitForManualResolution { + pub typegraph: String, + pub runtime: String, + pub migration: String, + } + +} + +use message::*; + +#[derive(Debug)] +pub struct ConfirmDatabaseResetRequired { + pub task: Addr>, + pub tg_name: String, + pub runtime_name: String, +} + +impl ConfirmHandler for ConfirmDatabaseResetRequired { + fn on_confirm(&self) { + self.task.do_send(message::ResetDatabase { + typegraph: self.tg_name.clone(), + runtime: self.runtime_name.clone(), + }) + } +} + +impl Handler for TaskActor { + type Result = (); + + fn handle(&mut self, msg: ConfirmDatabaseReset, ctx: &mut Self::Context) { + let ConfirmDatabaseReset { + typegraph, + runtime, + message, + } = msg; + let scope = format!("({})", self.action.path.display()); + + self.console.error(format!("{scope} {message}")); + self.console.warning(format!( + "{scope} database reset required for prisma runtime {rt} in typegraph {name}", + scope = scope.yellow(), + name = typegraph.cyan(), + rt = runtime.magenta(), + )); + + let console = self.console.clone(); + let addr = ctx.address(); + + let fut = async move { + let res = Confirm::new( + console.clone(), + format!( + "{scope} Do you want to reset the database for prisma runtime {rt} in typegraph {name}?", + scope = scope.yellow(), + name = typegraph.cyan(), + rt = runtime.magenta(), + ), + ).interact( + Box::new(ConfirmDatabaseResetRequired { + task: addr, + tg_name: typegraph, + runtime_name: runtime, + }) + + ).await; + + if let Err(err) = res { + console.error(format!("failed to read user input: {err}", err = err)); + } + + }; + ctx.spawn(fut.in_current_span().into_actor(self)); +}} + +impl Handler for TaskActor { + type Result = (); + + fn handle(&mut self, msg: ResetDatabase, _: &mut Self::Context) { + self.followup_task.migrations.push(( PrismaRuntimeId { + typegraph: msg.typegraph.clone(), + name: msg.runtime.clone(), + }, + MigrationActionOverride::ResetDatabase, + )); + + // /// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph + // fn do_force_reset(loader: &Addr, tg_path: PathBuf, runtime_name: String) { + // // reset 
+        // let glob_cfg = ServerStore::get_migration_action_glob();
+        // ServerStore::set_migration_action(
+        //     tg_path.clone(),
+        //     RuntimeMigrationAction {
+        //         runtime_name,
+        //         action: MigrationAction {
+        //             reset: true, // !
+        //             create: glob_cfg.create,
+        //         },
+        //     },
+        // );
+        //
+        // // reload
+        // loader.do_send(LoadModule(tg_path.into()));
+        // }
+    }
+}
+
+impl Handler<ResolveConstraintViolation> for TaskActor<DeployAction> {
+    type Result = ();
+
+    fn handle(&mut self, msg: ResolveConstraintViolation, ctx: &mut Self::Context) {
+        let ResolveConstraintViolation {
+            typegraph,
+            runtime,
+            column,
+            migration,
+            is_new_column,
+            table,
+            message,
+        } = msg;
+
+        let scope = format!("({})", self.action.path.display());
+        let scope = scope.yellow();
+
+        self.console.error(format!("{scope} {message}"));
+
+        if is_new_column {
+            self.console.info(format!("{scope} manually edit the migration {migration} or remove the migration and set a default value"));
+
+            let remove_latest = options::RemoveLatestMigration {
+                task: ctx.address(),
+                typegraph: typegraph.clone(),
+                runtime: runtime.clone(),
+                migration: migration.clone(),
+            };
+
+            let manual = options::ManualResolution {
+                task: ctx.address(),
+                typegraph: typegraph.clone(),
+                runtime: runtime.clone(),
+                migration: migration.clone(),
+                message: Some(format!(
+                    "Set a default value for the column `{}` in the table `{}`",
+                    column, table
+                )),
+            };
+
+            let reset = options::ForceReset {
+                task: ctx.address(),
+                typegraph: typegraph.clone(),
+                runtime: runtime.clone(),
+            };
+
+            // clone the console handle: `self` cannot be moved into the spawned future
+            let console = self.console.clone();
+            let fut = async move {
+                let res = Select::new(
+                    console.clone(),
+                    "Choose one of the following options".to_string(),
+                )
+                .interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)])
+                .await;
+                if let Err(err) = res {
+                    console.error(format!("failed to read user input: {err}", err = err));
+                }
+            };
+
+            ctx.spawn(fut.in_current_span().into_actor(self));
+        }
+    }
+}
+
+impl Handler<message::RemoveLatestMigration> for TaskActor<DeployAction> {
+    type Result = ();
+
+    fn handle(&mut self, msg: message::RemoveLatestMigration, ctx: &mut Self::Context) {
+        let message::RemoveLatestMigration {
+            typegraph,
+            runtime,
+            migration,
+        } = msg;
+
+        let migration_path = self.config.prisma_migration_dir_abs(&typegraph).join(&runtime).join(&migration);
+
+        // let typegraph = typegraph.clone();
+        // let runtime_name = runtime.clone();
+        let console = self.console.clone();
+        let typegraph_path = self.action.path.clone();
+        let addr = ctx.address();
+
+        let fut = async move {
+            let res = tokio::fs::remove_dir_all(&migration_path).await;
+            match res {
+                Ok(_) => {
+                    console.info(format!("Removed migration directory: {:?}", migration_path));
+                    console.info(format!(
+                        "You can now update your typegraph at {} to create an alternative non-breaking schema.",
+                        typegraph_path.display().to_string().bold()
+                    ));
+
+                    addr.do_send(message::ResetDatabase {
+                        typegraph,
+                        runtime,
+                    });
+                }
+                Err(err) => {
+                    console.error(format!("Failed to remove migration directory: {:?}", migration_path));
+                    console.error(format!("{err}", err = err));
+                }
+            }
+        };
+
+        ctx.spawn(fut.in_current_span().into_actor(self));
+    }
+}
+
+impl Handler<message::WaitForManualResolution> for TaskActor<DeployAction> {
+    type Result = ();
+
+    fn handle(&mut self, msg: message::WaitForManualResolution, ctx: &mut Self::Context) {
+        let migration_path = self.config.prisma_migration_dir_abs(&msg.typegraph).join(&msg.runtime).join(msg.migration).join("migration.sql");
+        eprintln!("Edit the migration file at {:?} then press enter to continue...", migration_path);
+
+        let console = self.console.clone();
+        let addr = ctx.address();
+
+        let fut = async move {
+            console.read_line().await;
addr.do_send(message::ResetDatabase { + typegraph: msg.typegraph, + runtime: msg.runtime, + }); + }; + ctx.spawn(fut.in_current_span().into_actor(self)); + } +} + +mod options { + use crate::deploy::actors::console::input::{SelectOption, OptionLabel}; + use crate::deploy::actors::task::TaskActor; + use crate::deploy::actors::task::deploy::DeployAction; + use crate::interlude::*; + +#[derive(Debug)] +pub struct RemoveLatestMigration { + pub task: Addr>, + pub typegraph: String, + pub runtime: String, + pub migration: String, // is this necessary?? +} + + +impl SelectOption for RemoveLatestMigration { + fn on_select(&self) { + self.task.do_send(super::message::RemoveLatestMigration { + typegraph: self.typegraph.clone(), + runtime: self.runtime.clone(), + migration: self.migration.clone(), + }); + } + + fn label(&self) -> OptionLabel<'_> { + OptionLabel::new("Remove the latest migration.") + } +} + +#[derive(Debug)] +pub struct ManualResolution { + pub task: Addr>, + pub typegraph: String, + pub runtime: String, + pub migration: String, + pub message: Option, +} + +impl SelectOption for ManualResolution { + fn on_select(&self) { + self.task.do_send(super::message::WaitForManualResolution { + typegraph: self.typegraph.clone(), + runtime: self.runtime.clone(), + migration: self.migration.clone(), + }); + } + + fn label(&self) -> OptionLabel<'_> { + let label = OptionLabel::new("Manually resolve the migration."); + if let Some(message) = &self.message { + label.with_secondary(format!("Edit the migration file: {}.", message)) + } else { + label + } + } +} + +#[derive(Debug)] +pub struct ForceReset { + pub task: Addr>, + pub typegraph: String, + pub runtime: String, +} + +impl SelectOption for ForceReset { + fn on_select(&self) { + self.task.do_send(super::message::ResetDatabase { + typegraph: self.typegraph.clone(), + runtime: self.runtime.clone(), + }); + } + + fn label(&self) -> OptionLabel<'_> { + OptionLabel::new("Force reset the development database.").with_secondary( + "Warning: The failed migration will potentially fail again in deployment.", + ) + } +} + + +} + diff --git a/meta-cli/src/deploy/actors/task/serialize.rs b/meta-cli/src/deploy/actors/task/serialize.rs index 7cb93b2727..a249e3823f 100644 --- a/meta-cli/src/deploy/actors/task/serialize.rs +++ b/meta-cli/src/deploy/actors/task/serialize.rs @@ -1,9 +1,12 @@ use super::action::{ - ActionFinalizeContext, ActionResult, OutputData, TaskAction, TaskActionGenerator, + ActionFinalizeContext, ActionResult, FollowupTaskConfig, OutputData, TaskAction, + TaskActionGenerator, }; use super::command::CommandBuilder; use super::TaskConfig; +use crate::com::store::MigrationAction; use crate::deploy::actors::console::Console; +use crate::deploy::actors::task_manager::TaskManager; use crate::interlude::*; use color_eyre::owo_colors::OwoColorize; use common::typegraph::Typegraph; @@ -37,7 +40,12 @@ impl SerializeActionGenerator { impl TaskActionGenerator for SerializeActionGenerator { type Action = SerializeAction; - fn generate(&self, path: Arc, permit: OwnedSemaphorePermit) -> Self::Action { + fn generate( + &self, + path: Arc, + followup: Option<()>, + permit: OwnedSemaphorePermit, + ) -> Self::Action { SerializeActionInner { path, task_config: self.task_config.clone(), @@ -49,11 +57,11 @@ impl TaskActionGenerator for SerializeActionGenerator { #[derive(Deserialize, Debug)] pub struct SerializeError { - typegraph: String, - error: String, + pub typegraph: String, + pub error: String, } -impl OutputData for Typegraph { +impl OutputData for Box { 
fn get_typegraph_name(&self) -> String { self.name().unwrap() } @@ -65,10 +73,15 @@ impl OutputData for SerializeError { } } +impl FollowupTaskConfig for () { + fn schedule(&self, _task_manager: Addr>>) {} +} + impl TaskAction for SerializeAction { - type SuccessData = Typegraph; + type SuccessData = Box; type FailureData = SerializeError; type Generator = SerializeActionGenerator; + type Followup = (); async fn get_command(&self) -> Result { CommandBuilder { @@ -122,4 +135,20 @@ impl TaskAction for SerializeAction { } } } + + fn get_global_config(&self) -> serde_json::Value { + serde_json::json!({ + "typegate": None::, + "prefix": None::, + }) + } + fn get_typegraph_config(&self, typegraph: &str) -> serde_json::Value { + serde_json::json!({ + "secrets": {}, + "artifactResolution": true, // TODO?? + "migrationActions": {}, + "defaultMigrationAction": MigrationAction::default(), + "migrationsDir": ".", // TODO + }) + } } diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 4b626d408f..ffbc78a81c 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -221,9 +221,11 @@ impl Handler for TaskManager { } } } - let action = self - .action_generator - .generate(message.path.clone(), message.permit); + let action = self.action_generator.generate( + message.path.clone(), + Default::default(), // TODO + message.permit, + ); let path = action.get_path_owned(); let task_addr = TaskActor::new( self.config.clone(), diff --git a/meta-cli/src/deploy/push/migration_resolution.rs b/meta-cli/src/deploy/push/migration_resolution.rs index afc1e7e48a..d2104dbdf6 100644 --- a/meta-cli/src/deploy/push/migration_resolution.rs +++ b/meta-cli/src/deploy/push/migration_resolution.rs @@ -35,138 +35,6 @@ impl ConfirmHandler for ConfirmDatabaseResetRequired { // NullConstraintViolation failure -#[derive(Debug)] -pub struct ForceReset { - pub loader: Addr, - pub typegraph_path: PathBuf, - pub runtime_name: String, -} - -impl SelectOption for ForceReset { - fn on_select(&self) { - let tg_path = self.typegraph_path.clone(); - let runtime_name = self.runtime_name.clone(); - do_force_reset(&self.loader, tg_path, runtime_name); - } - - fn label(&self) -> OptionLabel<'_> { - OptionLabel::new("Force reset the development database.").with_secondary( - "Warning: The failed migration will potentially fail again in deployment.", - ) - } -} - -#[derive(Debug)] -pub struct RemoveLatestMigration { - pub loader: Addr, - pub typegraph_path: PathBuf, - pub migration_dir: PathBuf, - pub runtime_name: String, - pub migration_name: String, // is this necessary?? - pub console: Addr, -} - -impl RemoveLatestMigration { - pub async fn apply( - migration_path: &Path, - typegraph_path: &Path, - runtime_name: String, - console: Addr, - loader: Addr, - ) -> Result<()> { - tokio::fs::remove_dir_all(migration_path).await?; // ! 
- - console.info(format!("Removed migration directory: {:?}", migration_path)); - console.info(format!( - "You can now update your typegraph at {} to create an alternative non-breaking schema.", - typegraph_path.display().to_string().bold() - )); - - let tg_path = typegraph_path.to_path_buf(); - let runtime_name = runtime_name.clone(); - do_force_reset(&loader, tg_path, runtime_name); - - Ok(()) - } -} - -impl SelectOption for RemoveLatestMigration { - fn on_select(&self) { - let migration_path = self - .migration_dir - .join(&self.runtime_name) - .join(&self.migration_name); - - let runtime_name = self.runtime_name.clone(); - let console = self.console.clone(); - let loader = self.loader.clone(); - let typegraph_path = self.typegraph_path.clone(); - - Arbiter::current().spawn(async move { - if let Err(err) = Self::apply( - &migration_path, - &typegraph_path, - runtime_name, - console.clone(), - loader, - ) - .await - { - console.warning(format!("Migration Path {}", migration_path.display())); - console.error(err.to_string()); - panic!("{err:?}"); // could occur if the latest migration does not match - } - }); - } - - fn label(&self) -> OptionLabel<'_> { - OptionLabel::new("Remove the latest migration.") - } -} - -#[derive(Debug)] -pub struct ManualResolution { - pub loader: Addr, - pub typegraph_path: PathBuf, - pub migration_dir: PathBuf, - pub runtime_name: String, - pub migration_name: String, - pub message: Option, - pub console: Addr, -} - -impl SelectOption for ManualResolution { - fn on_select(&self) { - let mig_path = self - .migration_dir - .join(format!("{}/migration.sql", self.migration_name)); - eprint!( - "Edit the migration file at {:?} then press enter to continue...", - mig_path - ); - - let console = self.console.clone(); - let runtime_name = self.runtime_name.clone(); - let typegraph_path = self.typegraph_path.clone(); - let loader = self.loader.clone(); - - Arbiter::current().spawn(async move { - // TODO watch migration file?? 
- console.read_line().await; - do_force_reset(&loader, typegraph_path, runtime_name); - }); - } - - fn label(&self) -> OptionLabel<'_> { - let label = OptionLabel::new("Manually resolve the migration."); - if let Some(message) = &self.message { - label.with_secondary(format!("Edit the migration file: {}.", message)) - } else { - label - } - } -} - /// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph fn do_force_reset(loader: &Addr, tg_path: PathBuf, runtime_name: String) { // reset diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index 1ec4e1e9d9..08903b5f3b 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -5,20 +5,15 @@ use crate::interlude::*; use std::sync::Mutex; use std::time::Duration; -use actix::prelude::*; -use owo_colors::OwoColorize; use serde::Deserialize; use crate::com::{responses::SDKResponse, store::ServerStore}; use crate::deploy::actors::console::input::{Confirm, Select}; use crate::deploy::actors::console::{Console, ConsoleActor}; use crate::deploy::actors::loader::LoaderActor; -use crate::deploy::push::migration_resolution::{ManualResolution, RemoveLatestMigration}; use lazy_static::lazy_static; -use super::migration_resolution::{ConfirmDatabaseResetRequired, ForceReset}; - #[derive(Deserialize, Debug)] #[serde(rename_all = "snake_case")] pub enum MessageType { @@ -61,7 +56,7 @@ pub struct PushResultRaw { #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] #[allow(unused)] -struct DatabaseResetRequired { +pub struct DatabaseResetRequired { message: String, runtime_name: String, } @@ -102,206 +97,206 @@ pub struct PushResult { sdk_response: SDKResponse, } -impl PushResult { - pub fn new( - console: Addr, - loader: Addr, - sdk_response: SDKResponse, - ) -> Result { - let raw = sdk_response - .as_push_result() - .wrap_err("SDK error pushing to typegate")?; - - let failure = match raw.failure { - Some(failure) => Some(serde_json::from_str(&failure)?), - None => None, - }; - - Ok(Self { - name: raw.name, - messages: raw.messages, - migrations: raw.migrations, - failure, - original_name: sdk_response.typegraph_name.clone(), - console, - loader, - sdk_response, - }) - } - - #[tracing::instrument] - pub async fn finalize(&self) -> Result<()> { - let name = self.name.clone(); - let print_failure = || { - self.console.error(format!( - "{} Error encountered while pushing {name}.", - "✕".red(), - name = name.cyan() - )); - }; - - let print_success = || { - self.console.info(format!( - "{} Successfully pushed typegraph {name}.", - "✓".green(), - name = name.cyan() - )); - }; - - // tg workdir + prisma_migration_rel - let migdir = ServerStore::get_config() - .unwrap() - .prisma_migration_dir_abs(&self.original_name); - - for migrations in self.migrations.iter() { - let dest = migdir.join(&migrations.runtime); - if let Err(err) = common::archive::unpack(&dest, Some(migrations.migrations.clone())) { - self.console.error(format!( - "error while unpacking migrations into {:?}", - migdir - )); - self.console.error(format!("{err:?}")); - } else { - self.console.info(format!( - "Successfully unpacked migrations for {name}/{} at {:?}", - migrations.runtime, dest - )); - } - } - - if let Some(failure) = self.failure.clone() { - print_failure(); - match failure { - PushFailure::Unknown(fail) => { - self.console.error(format!( - "Unknown error while pushing typegraph {tg_name}\n{msg}", - tg_name = name.cyan(), - msg = fail.message - )); - } - 
PushFailure::DatabaseResetRequired(failure) => { - handle_database_reset( - self.console.clone(), - self.loader.clone(), - failure, - self.sdk_response.clone(), - ) - .await? - } - PushFailure::NullConstraintViolation(failure) => { - handle_null_constraint_violation( - self.console.clone(), - self.loader.clone(), - failure, - self.sdk_response.clone(), - migdir.clone(), - ) - .await? - } - } - } else { - print_success(); - } - Ok(()) - } -} - -// DatabaseReset Handler + interactivity - -#[tracing::instrument] -async fn handle_database_reset( - console: Addr, - loader: Addr, - failure: DatabaseResetRequired, - sdk_response: SDKResponse, -) -> Result<()> { - let DatabaseResetRequired { - message, - runtime_name, - } = failure; - - let name = sdk_response.typegraph_name.clone(); - - console.error(message); - console.warning(format!( - "Database reset required for prisma runtime {rt} in typegraph {name}", - rt = runtime_name.magenta(), - )); - - let rt = runtime_name.clone(); - let _ = Confirm::new( - console, - format!("Do you want to reset the database for runtime {rt} on {name}?"), - ) - .interact(Box::new(ConfirmDatabaseResetRequired { - typegraph_path: sdk_response.typegraph_path, - runtime_name, - loader, - })) - .await?; - - Ok(()) -} - -// NullConstraintViolation Handler + interactivity - -#[tracing::instrument] -pub async fn handle_null_constraint_violation( - console: Addr, - loader: Addr, - failure: NullConstraintViolation, - sdk_response: SDKResponse, - migration_dir: PathBuf, -) -> Result<()> { - let NullConstraintViolation { - message, - runtime_name, - migration_name, - is_new_column, - column, - table, - } = failure; - - console.error(message); - - if is_new_column { - console.info(format!("manually edit the migration {migration_name}; or remove the migration and add set a default value")); - - let remove_latest = RemoveLatestMigration { - loader: loader.clone(), - typegraph_path: sdk_response.typegraph_path.clone(), - migration_dir: migration_dir.clone(), - runtime_name: runtime_name.clone(), - migration_name: migration_name.clone(), - console: console.clone(), - }; - - let manual = ManualResolution { - loader: loader.clone(), - typegraph_path: sdk_response.typegraph_path.clone(), - migration_dir: migration_dir.clone(), - runtime_name: runtime_name.clone(), - migration_name: migration_name.clone(), - message: Some(format!( - "Set a default value for the column `{}` in the table `{}`", - column, table - )), - console: console.clone(), - }; - - let reset = ForceReset { - loader: loader.clone(), - runtime_name: runtime_name.clone(), - typegraph_path: sdk_response.typegraph_path.clone(), - }; - - let _ = Select::new(console, "Choose one of the following options".to_string()) - .interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)]) - .await?; - } - - Ok(()) -} +// impl PushResult { +// pub fn new( +// console: Addr, +// loader: Addr, +// sdk_response: SDKResponse, +// ) -> Result { +// let raw = sdk_response +// .as_push_result() +// .wrap_err("SDK error pushing to typegate")?; +// +// let failure = match raw.failure { +// Some(failure) => Some(serde_json::from_str(&failure)?), +// None => None, +// }; +// +// Ok(Self { +// name: raw.name, +// messages: raw.messages, +// migrations: raw.migrations, +// failure, +// original_name: sdk_response.typegraph_name.clone(), +// console, +// loader, +// sdk_response, +// }) +// } +// +// #[tracing::instrument] +// pub async fn finalize(&self) -> Result<()> { +// let name = self.name.clone(); +// let print_failure = || { 
+// self.console.error(format!( +// "{} Error encountered while pushing {name}.", +// "✕".red(), +// name = name.cyan() +// )); +// }; +// +// let print_success = || { +// self.console.info(format!( +// "{} Successfully pushed typegraph {name}.", +// "✓".green(), +// name = name.cyan() +// )); +// }; +// +// // tg workdir + prisma_migration_rel +// let migdir = ServerStore::get_config() +// .unwrap() +// .prisma_migration_dir_abs(&self.original_name); +// +// for migrations in self.migrations.iter() { +// let dest = migdir.join(&migrations.runtime); +// if let Err(err) = common::archive::unpack(&dest, Some(migrations.migrations.clone())) { +// self.console.error(format!( +// "error while unpacking migrations into {:?}", +// migdir +// )); +// self.console.error(format!("{err:?}")); +// } else { +// self.console.info(format!( +// "Successfully unpacked migrations for {name}/{} at {:?}", +// migrations.runtime, dest +// )); +// } +// } +// +// if let Some(failure) = self.failure.clone() { +// print_failure(); +// match failure { +// PushFailure::Unknown(fail) => { +// self.console.error(format!( +// "Unknown error while pushing typegraph {tg_name}\n{msg}", +// tg_name = name.cyan(), +// msg = fail.message +// )); +// } +// PushFailure::DatabaseResetRequired(failure) => { +// handle_database_reset( +// self.console.clone(), +// self.loader.clone(), +// failure, +// self.sdk_response.clone(), +// ) +// .await? +// } +// PushFailure::NullConstraintViolation(failure) => { +// handle_null_constraint_violation( +// self.console.clone(), +// self.loader.clone(), +// failure, +// self.sdk_response.clone(), +// migdir.clone(), +// ) +// .await? +// } +// } +// } else { +// print_success(); +// } +// Ok(()) +// } +// } +// +// // DatabaseReset Handler + interactivity +// +// #[tracing::instrument] +// async fn handle_database_reset( +// console: Addr, +// loader: Addr, +// failure: DatabaseResetRequired, +// sdk_response: SDKResponse, +// ) -> Result<()> { +// let DatabaseResetRequired { +// message, +// runtime_name, +// } = failure; +// +// let name = sdk_response.typegraph_name.clone(); +// +// console.error(message); +// console.warning(format!( +// "Database reset required for prisma runtime {rt} in typegraph {name}", +// rt = runtime_name.magenta(), +// )); +// +// let rt = runtime_name.clone(); +// let _ = Confirm::new( +// console, +// format!("Do you want to reset the database for runtime {rt} on {name}?"), +// ) +// .interact(Box::new(ConfirmDatabaseResetRequired { +// typegraph_path: sdk_response.typegraph_path, +// runtime_name, +// loader, +// })) +// .await?; +// +// Ok(()) +// } +// +// // NullConstraintViolation Handler + interactivity +// +// #[tracing::instrument] +// pub async fn handle_null_constraint_violation( +// console: Addr, +// loader: Addr, +// failure: NullConstraintViolation, +// sdk_response: SDKResponse, +// migration_dir: PathBuf, +// ) -> Result<()> { +// let NullConstraintViolation { +// message, +// runtime_name, +// migration_name, +// is_new_column, +// column, +// table, +// } = failure; +// +// console.error(message); +// +// if is_new_column { +// console.info(format!("manually edit the migration {migration_name}; or remove the migration and add set a default value")); +// +// let remove_latest = RemoveLatestMigration { +// loader: loader.clone(), +// typegraph_path: sdk_response.typegraph_path.clone(), +// migration_dir: migration_dir.clone(), +// runtime_name: runtime_name.clone(), +// migration_name: migration_name.clone(), +// console: console.clone(), +// 
}; +// +// let manual = ManualResolution { +// loader: loader.clone(), +// typegraph_path: sdk_response.typegraph_path.clone(), +// migration_dir: migration_dir.clone(), +// runtime_name: runtime_name.clone(), +// migration_name: migration_name.clone(), +// message: Some(format!( +// "Set a default value for the column `{}` in the table `{}`", +// column, table +// )), +// console: console.clone(), +// }; +// +// let reset = ForceReset { +// loader: loader.clone(), +// runtime_name: runtime_name.clone(), +// typegraph_path: sdk_response.typegraph_path.clone(), +// }; +// +// let _ = Select::new(console, "Choose one of the following options".to_string()) +// .interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)]) +// .await?; +// } +// +// Ok(()) +// } lazy_static! { static ref RETRY_COUNTERS: Mutex>> = Mutex::new(HashMap::new()); diff --git a/typegate/src/errors.ts b/typegate/src/errors.ts index 5abfe12b18..f20a801228 100644 --- a/typegate/src/errors.ts +++ b/typegate/src/errors.ts @@ -4,6 +4,7 @@ import { basename, dirname } from "std/url/mod.ts"; import { extname } from "std/path/mod.ts"; import { getLogger } from "./log.ts"; +import config from "./config.ts"; const logger = getLogger(import.meta); @@ -63,6 +64,9 @@ export class BaseError extends Error { this.module ?? "", this.message, ); + if (config.debug) { + logger.error(this.stack); + } logger.warn("Responding with HTTP {}", this.code); let responseObj; @@ -92,13 +96,10 @@ export class BaseError extends Error { }; } - return new Response( - JSON.stringify(responseObj), - { - status: this.code, - headers: { "Content-Type": "application/json" }, - }, - ); + return new Response(JSON.stringify(responseObj), { + status: this.code, + headers: { "Content-Type": "application/json" }, + }); } } diff --git a/typegate/src/services/responses.ts b/typegate/src/services/responses.ts index d424d5e841..c8756cc84d 100644 --- a/typegate/src/services/responses.ts +++ b/typegate/src/services/responses.ts @@ -37,11 +37,13 @@ export const jsonError = ( }; export const badRequest = (message: string) => { - return new BaseError(null, ErrorKind.User, message).withType("BadRequest") + return new BaseError(null, ErrorKind.User, message) + .withType("BadRequest") .toResponse(); }; -export const notFound = () => - new BaseError(null, ErrorKind.User, "not found", 404).withType("NotFound") +export const notFound = (message = "not found") => + new BaseError(null, ErrorKind.User, message, 404) + .withType("NotFound") .toResponse(); export const methodNotAllowed = () => @@ -51,5 +53,6 @@ export const methodNotAllowed = () => export const serverError = () => { return new BaseError(null, ErrorKind.Service, "internal server error") - .withType("ServerError").toResponse(); + .withType("ServerError") + .toResponse(); }; diff --git a/typegate/src/typegate/mod.ts b/typegate/src/typegate/mod.ts index a4009d2472..35ff3b6acf 100644 --- a/typegate/src/typegate/mod.ts +++ b/typegate/src/typegate/mod.ts @@ -32,9 +32,8 @@ import { resolveIdentifier } from "../services/middlewares.ts"; import { handleGraphQL } from "../services/graphql_service.ts"; import { getLogger } from "../log.ts"; import { MigrationFailure } from "../runtimes/prisma/hooks/run_migrations.ts"; -import introspectionJson from "../typegraphs/introspection.json" with { - type: "json", -}; +import introspectionJson from "../typegraphs/introspection.json" with { type: + "json" }; import { ArtifactService } from "../services/artifact_service.ts"; import { ArtifactStore } from "./artifacts/mod.ts"; 
import { SyncConfig } from "../sync/config.ts"; @@ -107,9 +106,7 @@ export class Typegate implements AsyncDisposable { } else { logger.info("Entering sync mode..."); if (customRegister) { - throw new Error( - "Custom register is not supported in sync mode", - ); + throw new Error("Custom register is not supported in sync mode"); } await using stack = new AsyncDisposableStack(); @@ -120,10 +117,7 @@ export class Typegate implements AsyncDisposable { await limiter.terminate(); }); - const artifactStore = await createSharedArtifactStore( - tmpDir, - syncConfig, - ); + const artifactStore = await createSharedArtifactStore(tmpDir, syncConfig); stack.use(artifactStore); const typegate = new Typegate( @@ -230,7 +224,7 @@ export class Typegate implements AsyncDisposable { const engine = this.register.get(engineName); if (!engine) { - return notFound(); + return notFound(`engine not found for typegraph '${engineName}'`); } const cors = engine.tg.cors(request); @@ -283,14 +277,7 @@ export class Typegate implements AsyncDisposable { return methodNotAllowed(); } - return handleGraphQL( - request, - engine, - context, - info, - limit, - headers, - ); + return handleGraphQL(request, engine, context, info, limit, headers); } catch (e) { Sentry.captureException(e); console.error(e); @@ -320,10 +307,7 @@ export class Typegate implements AsyncDisposable { } } - const secretManager = new SecretManager( - tgJson, - secrets, - ); + const secretManager = new SecretManager(tgJson, secrets); const pushResponse = new PushResponse(); logger.info("Handling onPush hooks"); @@ -350,22 +334,19 @@ export class Typegate implements AsyncDisposable { ); const oldArtifacts = new Set( - Object.values(this.register.get(name)?.tg.tg.meta.artifacts ?? {}) - .map((m) => m.hash), + Object.values(this.register.get(name)?.tg.tg.meta.artifacts ?? {}).map( + (m) => m.hash, + ), ); logger.info(`Registering engine '${name}'`); await this.register.add(engine); const newArtifacts = new Set( - Object.values(engine.tg.tg.meta.artifacts) - .map((m) => m.hash), + Object.values(engine.tg.tg.meta.artifacts).map((m) => m.hash), ); - await this.artifactStore.updateRefCounts( - newArtifacts, - oldArtifacts, - ); + await this.artifactStore.updateRefCounts(newArtifacts, oldArtifacts); return { name, @@ -383,8 +364,7 @@ export class Typegate implements AsyncDisposable { await this.register.remove(name); const artifacts = new Set( - Object.values(engine.tg.tg.meta.artifacts) - .map((m) => m.hash), + Object.values(engine.tg.tg.meta.artifacts).map((m) => m.hash), ); await this.artifactStore.updateRefCounts(new Set(), artifacts); await this.artifactStore.runArtifactGC(); @@ -402,18 +382,14 @@ export class Typegate implements AsyncDisposable { const introspection = enableIntrospection ? 
await TypeGraph.init( - this, - introspectionDef, - new SecretManager(introspectionDef, {}), - { - typegraph: TypeGraphRuntime.init( - tgDS, - [], - {}, - ), - }, - null, - ) + this, + introspectionDef, + new SecretManager(introspectionDef, {}), + { + typegraph: TypeGraphRuntime.init(tgDS, [], {}), + }, + null, + ) : null; const tg = await TypeGraph.init( diff --git a/typegraph/core/src/global_store.rs b/typegraph/core/src/global_store.rs index ca834b471c..2a54df1fb9 100644 --- a/typegraph/core/src/global_store.rs +++ b/typegraph/core/src/global_store.rs @@ -11,7 +11,6 @@ use crate::wit::core::{Policy as CorePolicy, PolicyId, RuntimeId}; use crate::wit::utils::Auth as WitAuth; #[allow(unused)] -use crate::wit::core::ArtifactResolutionConfig; use crate::wit::runtimes::{Effect, MaterializerDenoPredefined, MaterializerId}; use graphql_parser::parse_query; use indexmap::IndexMap; @@ -220,9 +219,9 @@ impl Store { }) } - pub fn set_deploy_cwd(value: Option) { + pub fn set_deploy_cwd(value: PathBuf) { with_store_mut(|s| { - s.deploy_cwd_dir = value.map(PathBuf::from); + s.deploy_cwd_dir = Some(value); }) } diff --git a/typegraph/core/src/lib.rs b/typegraph/core/src/lib.rs index df692acc54..e27c57daad 100644 --- a/typegraph/core/src/lib.rs +++ b/typegraph/core/src/lib.rs @@ -31,9 +31,9 @@ use types::{ use utils::clear_name; use wit::core::{ - Artifact, ArtifactResolutionConfig, ContextCheck, Policy, PolicyId, PolicySpec, TransformData, - TypeBase, TypeEither, TypeFile, TypeFloat, TypeFunc, TypeId as CoreTypeId, TypeInteger, - TypeList, TypeOptional, TypeString, TypeStruct, TypeUnion, TypegraphInitParams, + Artifact, ContextCheck, FinalizeParams, Policy, PolicyId, PolicySpec, TransformData, TypeBase, + TypeEither, TypeFile, TypeFloat, TypeFunc, TypeId as CoreTypeId, TypeInteger, TypeList, + TypeOptional, TypeString, TypeStruct, TypeUnion, TypegraphInitParams, }; use wit::runtimes::{Guest, MaterializerDenoFunc}; @@ -53,9 +53,7 @@ impl wit::core::Guest for Lib { typegraph::init(params) } - fn finalize_typegraph( - res_config: Option, - ) -> Result<(String, Vec)> { + fn finalize_typegraph(res_config: FinalizeParams) -> Result<(String, Vec)> { typegraph::finalize(res_config) } diff --git a/typegraph/core/src/typegraph.rs b/typegraph/core/src/typegraph.rs index 84ce77d061..c09527262f 100644 --- a/typegraph/core/src/typegraph.rs +++ b/typegraph/core/src/typegraph.rs @@ -27,8 +27,8 @@ use std::hash::Hasher as _; use std::rc::Rc; use crate::wit::core::{ - Artifact as WitArtifact, ArtifactResolutionConfig, Error as TgError, Guest, MaterializerId, - PolicyId, PolicySpec, RuntimeId, TypegraphInitParams, + Artifact as WitArtifact, Error as TgError, FinalizeParams, Guest, MaterializerId, PolicyId, + PolicySpec, RuntimeId, TypegraphInitParams, }; #[derive(Default)] @@ -181,9 +181,7 @@ pub fn finalize_auths(ctx: &mut TypegraphContext) -> Result>>() } -pub fn finalize( - res_config: Option, -) -> Result<(String, Vec)> { +pub fn finalize(params: FinalizeParams) -> Result<(String, Vec)> { #[cfg(test)] eprintln!("Finalizing typegraph..."); @@ -221,11 +219,9 @@ pub fn finalize( deps: Default::default(), }; - let config = res_config.map(|config| { - tg.meta.prefix.clone_from(&config.prefix); - config - }); - TypegraphPostProcessor::new(config).postprocess(&mut tg)?; + tg.meta.prefix = params.prefix.clone(); + + TypegraphPostProcessor::new(params).postprocess(&mut tg)?; let artifacts = tg .meta @@ -237,7 +233,7 @@ pub fn finalize( Store::restore(ctx.saved_store_state.unwrap()); - let result = match 
serde_json::to_string_pretty(&tg).map_err(|e| e.to_string().into()) { + let result = match serde_json::to_string(&tg).map_err(|e| e.to_string().into()) { Ok(res) => res, Err(e) => return Err(e), }; diff --git a/typegraph/core/src/utils/postprocess/mod.rs b/typegraph/core/src/utils/postprocess/mod.rs index f591581e0f..7a491aa1ab 100644 --- a/typegraph/core/src/utils/postprocess/mod.rs +++ b/typegraph/core/src/utils/postprocess/mod.rs @@ -1,9 +1,9 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::{global_store::Store, utils::fs_host, wit::core::ArtifactResolutionConfig}; +use crate::{global_store::Store, utils::fs_host, wit::core::FinalizeParams}; use common::typegraph::Typegraph; -use std::path::Path; +use std::path::{Path, PathBuf}; pub mod deno_rt; pub mod prisma_rt; @@ -24,31 +24,34 @@ pub trait PostProcessor { /// Compose all postprocessors pub struct TypegraphPostProcessor { - config: Option, + config: FinalizeParams, } impl TypegraphPostProcessor { - pub fn new(config: Option) -> Self { + pub fn new(config: FinalizeParams) -> Self { Self { config } } } impl PostProcessor for TypegraphPostProcessor { fn postprocess(self, tg: &mut Typegraph) -> Result<(), TgError> { - if let Some(config) = self.config { - Store::set_deploy_cwd(config.dir); // fs_host::cwd() will now use this value - Store::set_codegen_flag(config.codegen); + let config = self.config; + let typegraph_dir = PathBuf::from(config.typegraph_path) + .parent() + .unwrap() + .to_owned(); + Store::set_deploy_cwd(typegraph_dir); // fs_host::cwd() will now use this value + Store::set_codegen_flag(Some(config.codegen)); - PrismaProcessor::new(config.prisma_migration).postprocess(tg)?; + PrismaProcessor::new(config.prisma_migration).postprocess(tg)?; - // Artifact resolution depends on the default cwd() (parent process) - // unless overwritten by `dir` through Store::set_deploy_cwd(..) (cli or custom dir with tgDeploy) - let allow_fs_read_artifacts = !config.disable_artifact_resolution.unwrap_or(false); - if allow_fs_read_artifacts { - DenoProcessor.postprocess(tg)?; - PythonProcessor.postprocess(tg)?; - WasmProcessor.postprocess(tg)?; - } + // Artifact resolution depends on the default cwd() (parent process) + // unless overwritten by `dir` through Store::set_deploy_cwd(..) 
(cli or custom dir with tgDeploy) + let allow_fs_read_artifacts = config.artifact_resolution; + if allow_fs_read_artifacts { + DenoProcessor.postprocess(tg)?; + PythonProcessor.postprocess(tg)?; + WasmProcessor.postprocess(tg)?; } ValidationProcessor.postprocess(tg)?; diff --git a/typegraph/core/src/utils/postprocess/prisma_rt.rs b/typegraph/core/src/utils/postprocess/prisma_rt.rs index eece1171aa..751972d6a6 100644 --- a/typegraph/core/src/utils/postprocess/prisma_rt.rs +++ b/typegraph/core/src/utils/postprocess/prisma_rt.rs @@ -8,15 +8,16 @@ use common::typegraph::Typegraph; use crate::utils::fs_host; use crate::utils::postprocess::PostProcessor; -use crate::wit::core::{MigrationAction, MigrationConfig}; +use crate::wit::core::MigrationAction; +use crate::wit::core::PrismaMigrationConfig; use crate::wit::metatype::typegraph::host::{eprint, path_exists}; pub struct PrismaProcessor { - config: MigrationConfig, + config: PrismaMigrationConfig, } impl PrismaProcessor { - pub fn new(config: MigrationConfig) -> Self { + pub fn new(config: PrismaMigrationConfig) -> Self { Self { config } } } @@ -30,7 +31,7 @@ impl PostProcessor for PrismaProcessor { impl PrismaProcessor { pub fn embed_prisma_migrations(&self, tg: &mut Typegraph) -> Result<(), String> { - let base_migration_path = self.prisma_migrations_dir()?; + let base_migration_path = PathBuf::from(&self.config.migrations_dir); for rt in tg.runtimes.iter_mut() { if let TGRuntime::Known(Prisma(rt_data)) = rt { @@ -57,22 +58,20 @@ impl PrismaProcessor { Ok(()) } - /// Simply concat `cwd` with `migration-path` (provided manually or set by the cli) - pub fn prisma_migrations_dir(&self) -> Result { - let migration_dir = self.config.migration_dir.clone(); - let path = fs_host::cwd()?.join(PathBuf::from(migration_dir)); - Ok(path) - } - /// Find the appropriate migration action (usually set from the cli) /// If nothing is found, use the global action config (set initially) pub fn get_action_by_rt_name(&self, name: &str) -> MigrationAction { - if let Some(actions) = self.config.runtime_actions.clone() { - if let Some(action) = actions.iter().find(|(rt, _)| rt.eq(name)) { - eprint(&format!("Specific migration action found for {name}")); - return action.1; - } - } - self.config.global_action + self.config + .migration_actions + .iter() + .filter_map(|(rt, action)| { + if rt == name { + Some(action.clone()) + } else { + None + } + }) + .last() + .unwrap_or(self.config.default_migration_action.clone()) } } diff --git a/typegraph/core/wit/typegraph.wit b/typegraph/core/wit/typegraph.wit index 7d16c53d04..9e2c0cf615 100644 --- a/typegraph/core/wit/typegraph.wit +++ b/typegraph/core/wit/typegraph.wit @@ -42,26 +42,38 @@ interface core { init-typegraph: func(params: typegraph-init-params) -> result<_, error>; + // record migration-action { + // create: bool, + // reset: bool + // } + + // record migration-config { + // migration-dir: string, + // global-action: migration-action, // global config (all runtimes) + // runtime-actions: option>> // config per runtime name (override global-action) + // } + record migration-action { + apply: bool, create: bool, - reset: bool + reset: bool, } - record migration-config { - migration-dir: string, - global-action: migration-action, // global config (all runtimes) - runtime-actions: option>> // config per runtime name (override global-action) + record prisma-migration-config { + migrations-dir: string, + migration-actions: list>, + default-migration-action: migration-action, } - record artifact-resolution-config { - 
dir: option, // cwd() if none - prefix: option, // overrides tg.meta.prefix field if defined - disable-artifact-resolution: option, // any postprocess using fs ops should be disabled when set to true - prisma-migration: migration-config, - codegen: option // if set, sdk should not throw errors when the script does not exist + record finalize-params { + typegraph-path: string, + prefix: option, + artifact-resolution: bool, + codegen: bool, + prisma-migration: prisma-migration-config, } - finalize-typegraph: func(config: option) -> result>, error>; + finalize-typegraph: func(params: finalize-params) -> result>, error>; type type-id = u32; record type-base { diff --git a/typegraph/node/sdk/src/io.ts b/typegraph/node/sdk/src/io.ts new file mode 100644 index 0000000000..2523057e29 --- /dev/null +++ b/typegraph/node/sdk/src/io.ts @@ -0,0 +1,254 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 +import { inspect } from "node:util"; +import { createInterface, Interface } from "node:readline"; + +/** + * see: module level documentation `meta-cli/src/deploy/actors/task.rs` + */ + +function getOutput(args: any[]) { + return args + .map((arg) => { + if (typeof arg === "string") return arg; + return inspect(arg, { + colors: process.stdout.isTTY, + depth: 10, + maxStringLength: 1000, + maxArrayLength: 20, + }); + }) + .join(" "); +} + +export const log = { + debug(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`debug: ${output}\n`); + }, + info(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`info: ${output}\n`); + }, + warn(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`warning: ${output}\n`); + }, + error(...args: any[]) { + const output = getOutput(args); + process.stdout.write(`error: ${output}\n`); + }, + + failure(data: any) { + process.stdout.write(`failure: ${JSON.stringify(data)}\n`); + }, + success(data: any, noEncode = false) { + const encoded = noEncode ? 
data : JSON.stringify(data); + process.stdout.write(`success: ${encoded}\n`); + }, +}; + +class RpcResponseReader { + private resolvers: Map void> = new Map(); + // private readline: Interface; + private buffer: string = ""; + // private listening = false; + // private handler: (line: string) => void; + + constructor() { + log.debug("creating readline interface"); + process.stdin.setEncoding("utf-8"); + // this.readline = createInterface({ + // input: process.stdin, + // }); + + // const handler = (line: string) => { + // log.debug("got line", line); + // const message = JSON.parse(line); + // const resolver = this.resolvers.get(message.id); + // if (resolver) { + // log.debug("rpc response", message); + // resolver(message.result); + // this.resolvers.delete(message.id); + + // if (this.resolvers.size === 0) { + // this.readline.pause(); + // log.debug("paused"); + // } + // } + // }; + + // log.debug("adding line handler"); + // this.readline.on("line", handler); + + // log.debug("unref stdin"); + // process.stdin.unref(); + // log.debug("unreffed stdin"); + } + + // async open() { + // if (this.readline) { // return; + // } + // this.readline = createInterface({ + // input: process.stdin, + // }); + // log.debug("opened"); + + // for await (const line of this.readline) { + // const message = JSON.parse(line); + // const resolver = this.resolvers.get(message.id); + // if (resolver) { + // log.debug("rpc response", message); + // resolver(message.result); + // this.resolvers.delete(message.id); + // if (this.resolvers.size === 0) { + // this.readline.close(); + // log.debug("closed"); + // } + // } + // } + // } + + // TODO implement timeout + // async loop() { + // log.debug("loop: on"); + // for await (const line of this.readline) { + // log.debug("resolvers", this.resolvers.size, line); + // try { + // const message = JSON.parse(line); + // const resolver = this.resolvers.get(message.id); + // if (resolver) { + // log.debug("rpc response", message); + // resolver(message.result); + // this.resolvers.delete(message.id); + // if (this.resolvers.size === 0) { + // break; + // } + // } + // } catch (e) { + // // pass + // } + // } + // this.running = false; + // log.debug("loop: off"); + // } + + read(id: number) { + return new Promise((resolve, reject) => { + const handler = () => { + while (true) { + const chunk = process.stdin.read(); + if (chunk == null) { + break; + } + this.buffer += chunk; + const lines = this.buffer.split(/\r\n|\n/); + if (lines.length > 2) { + reject(new Error("not sequential")); + } else if (lines.length <= 1) { + continue; + } + this.buffer = lines.pop()!; + + try { + const message = JSON.parse(lines[0]); + if (message.id === id) { + resolve(message.result); + break; + } + } catch (e) { + reject("invalid message"); + } + } + process.stdin.off("readable", handler); + }; + process.stdin.on("readable", handler); + }); + + // if (!this.listening) { + // this.readline.resume(); + // log.debug("listening: on"); + // this.listening = true; + // this.readline.on("line", this.handler); + // } + // if (!this.running) { + // this.running = true; + // this.loop(); + // } + // this.open(); // no await + // this.readline.resume(); + // return new Promise((resolve) => { + // this.resolvers.set(id, resolve); + // }); + + // return new Promise((resolve, reject) => { + // this.readline.resume(); + // this.readline.once("line", (line) => { + // try { + // const message = JSON.parse(line); + // if (message.id !== id) { + // reject("required sequential read"); + // } else { + // 
this.readline.pause(); + // resolve(message.result); + // } + // } catch (e) { + // reject(e); + // } + // }); + // }); + } +} + +let rpcCall = (() => { + const responseReader = new RpcResponseReader(); + let latestRpcId = 0; + + return (method: string, params: any = null) => { + const rpcId = latestRpcId++; + const rpcMessage = JSON.stringify({ + jsonrpc: "2.0", + id: rpcId, + method, + params, + }); + + process.stdout.write(`jsonrpc: ${rpcMessage}\n`); + return responseReader.read(rpcId); + }; +})(); + +export interface TypegateConfig { + endpoint: string; + auth: { + username: string; + password: string; + }; +} + +export interface GlobalConfig { + typegate: TypegateConfig | null; // null for serialize + prefix: string | null; + // TODO codegen + // TODO base migration directory +} + +export interface MigrationAction { + apply: boolean; + create: boolean; + reset: boolean; +} + +export interface TypegraphConfig { + secrets: Record; + artifactResolution: boolean; + migrationActions: Record; + defaultMigrationAction: MigrationAction; + migrationsDir: string; +} + +export const rpc = { + getGlobalConfig: () => rpcCall("queryGlobalConfig") as Promise, + getTypegraphConfig: (typegraph: string) => + rpcCall("queryTypegraphConfig", { typegraph }) as Promise, +}; diff --git a/typegraph/node/sdk/src/log.ts b/typegraph/node/sdk/src/log.ts deleted file mode 100644 index c88a1bb8b9..0000000000 --- a/typegraph/node/sdk/src/log.ts +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 - -import { inspect } from "node:util"; - -/** - * see: module level documentation `meta-cli/src/deploy/actors/task.rs` - */ - -function getOutput(args: any[]) { - return args.map((arg) => { - if (typeof arg === "string") return arg; - return inspect(arg, { - colors: process.stdout.isTTY, - depth: 10, - maxStringLength: 1000, - maxArrayLength: 20, - }); - }).join(" "); -} - -export const log = { - debug(...args: any[]) { - const output = getOutput(args); - process.stdout.write(`debug: ${output}\n`); - }, - info(...args: any[]) { - const output = getOutput(args); - process.stdout.write(`info: ${output}\n`); - }, - warn(...args: any[]) { - const output = getOutput(args); - process.stdout.write(`warning: ${output}\n`); - }, - error(...args: any[]) { - const output = getOutput(args); - process.stdout.write(`error: ${output}\n`); - }, - - failure(data: any) { - process.stdout.write(`failure: ${JSON.stringify(data)}\n`); - }, - success(data: any) { - process.stdout.write(`success: ${JSON.stringify(data)}\n`); - }, -}; diff --git a/typegraph/node/sdk/src/metagen.ts b/typegraph/node/sdk/src/metagen.ts index 50afd6bb29..2aeeaf763c 100644 --- a/typegraph/node/sdk/src/metagen.ts +++ b/typegraph/node/sdk/src/metagen.ts @@ -1,38 +1,43 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -import { ArtifactResolutionConfig } from "./gen/interfaces/metatype-typegraph-core.js"; +import { FinalizeParams } from "./gen/interfaces/metatype-typegraph-core.js"; import { TypegraphOutput } from "./typegraph.js"; import { wit_utils } from "./wit.js"; -import { freezeTgOutput } from "./utils/func_utils.js"; +import { freezeTgOutput, getEnvVariable } from "./utils/func_utils.js"; import { MdkConfig, MdkOutput, } from "./gen/interfaces/metatype-typegraph-utils.js"; -const codegenArtefactConfig = { +const finalizeParams = { + // TODO env variable key constants.js + typegraphPath: getEnvVariable("MCLI_TG_PATH")!, + prefix: undefined, + artifactResolution: false, + codegen: true, prismaMigration: { - globalAction: { - create: false, - reset: false, + migrationsDir: "prisma-migrations", + migrationActions: [], + defaultMigrationAction: { + apply: true, + create: true, + reset: true, }, - migrationDir: ".", }, - disableArtifactResolution: true, - codegen: true, -} as ArtifactResolutionConfig; +} satisfies FinalizeParams; export class Metagen { - constructor(private workspacePath: string, private genConfig: unknown) {} + constructor( + private workspacePath: string, + private genConfig: unknown, + ) {} - private getMdkConfig( - tgOutput: TypegraphOutput, - targetName: string, - ) { - const frozenOut = freezeTgOutput(codegenArtefactConfig, tgOutput); + private getMdkConfig(tgOutput: TypegraphOutput, targetName: string) { + const frozenOut = freezeTgOutput(finalizeParams, tgOutput); return { configJson: JSON.stringify(this.genConfig), - tgJson: frozenOut.serialize(codegenArtefactConfig).tgJson, + tgJson: frozenOut.serialize(finalizeParams).tgJson, targetName, workspacePath: this.workspacePath, } as MdkConfig; @@ -40,11 +45,10 @@ export class Metagen { dryRun(tgOutput: TypegraphOutput, targetName: string, overwrite?: false) { const mdkConfig = this.getMdkConfig(tgOutput, targetName); - return wit_utils.metagenExec(mdkConfig) - .map((value) => ({ - ...value, - overwrite: overwrite ?? value.overwrite, - })) as Array; + return wit_utils.metagenExec(mdkConfig).map((value) => ({ + ...value, + overwrite: overwrite ?? 
value.overwrite, + })) as Array; } run(tgOutput: TypegraphOutput, targetName: string, overwrite?: false) { diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index 22c5ee7fd7..c33e23fd6d 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -5,7 +5,7 @@ import { BasicAuth } from "./tg_deploy.js"; import { Artifact } from "./gen/interfaces/metatype-typegraph-core.js"; import { dirname, join } from "node:path"; import * as fsp from "node:fs/promises"; -import { log } from "./log.js"; +import { log } from "./io.js"; import { execRequest } from "./utils/func_utils.js"; interface UploadArtifactMeta { @@ -34,11 +34,15 @@ export class ArtifactUploader { artifactMetas: UploadArtifactMeta[], ): Promise> { const artifactsJson = JSON.stringify(artifactMetas); - const uploadUrls: Array = await execRequest(this.getUploadUrl, { - method: "POST", - headers: this.headers, - body: artifactsJson, - }, `tgDeploy failed to get upload urls`); + const uploadUrls: Array = await execRequest( + this.getUploadUrl, + { + method: "POST", + headers: this.headers, + body: artifactsJson, + }, + `tgDeploy failed to get upload urls`, + ); // if (!response.ok) { // log.debug("response", response); @@ -50,8 +54,7 @@ export class ArtifactUploader { // const uploadUrls: Array = await response.json(); if (uploadUrls.length !== artifactMetas.length) { - const diff = - `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; + const diff = `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; throw new Error(`Failed to get upload URLs for all artifacts: ${diff}`); } @@ -84,11 +87,15 @@ export class ArtifactUploader { // TODO: stream const content = await fsp.readFile(path); log.info("uploading artifact", meta.relativePath, urlObj.href); - const res = await execRequest(urlObj, { - method: "POST", - headers: uploadHeaders, - body: new Uint8Array(content), - } as RequestInit, `failed to upload artifact ${meta.relativePath}`); + const res = await execRequest( + urlObj, + { + method: "POST", + headers: uploadHeaders, + body: new Uint8Array(content), + } as RequestInit, + `failed to upload artifact ${meta.relativePath}`, + ); if (!res.ok) { const err = await res.json(); // To be read by the CLI? 
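For context on the SDK-facing shape these changes converge on: the tg_deploy.ts hunks further below swap the flat baseUrl/auth/artifactsConfig arguments for a typegate connection object plus per-typegraph migration options. A minimal, illustrative call under those assumptions follows; the endpoint, credentials, secret, and file paths are placeholders, and the relative import path is only an assumption about where the caller lives relative to the SDK sources.

// Illustrative sketch only, not part of the patch. Field names follow the new
// TypegraphDeployParams / TypegateConnectionOptions interfaces introduced below.
import { BasicAuth, tgDeploy } from "./tg_deploy.js";
import { tg } from "./my_typegraph.js"; // hypothetical TypegraphOutput

const { response } = await tgDeploy(tg, {
  typegate: {
    url: "http://localhost:7890",             // assumed local typegate endpoint
    auth: new BasicAuth("admin", "password"), // placeholder credentials
  },
  typegraphPath: "./my_typegraph.ts",
  secrets: { POSTGRES: "postgresql://..." },  // example secret, elided value
  migrationsDir: "prisma-migrations",
  defaultMigrationAction: { apply: true, create: false, reset: false },
});

Note that when defaultMigrationAction is omitted, the tg_deploy.ts hunk below falls back to { apply: true, create: false, reset: false }.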
@@ -104,16 +111,14 @@ export class ArtifactUploader { } private getMetas(artifacts: Artifact[]): UploadArtifactMeta[] { - return artifacts.map( - (artifact) => { - return { - typegraphName: this.tgName, - hash: artifact.hash, - relativePath: artifact.path, - sizeInBytes: artifact.size, - }; - }, - ); + return artifacts.map((artifact) => { + return { + typegraphName: this.tgName, + hash: artifact.hash, + relativePath: artifact.path, + sizeInBytes: artifact.size, + }; + }); } private handleUploadErrors( @@ -146,11 +151,9 @@ export class ArtifactUploader { const uploadUrls = await this.fetchUploadUrls(artifactMetas); log.debug("upload urls", uploadUrls); const results = await Promise.allSettled( - uploadUrls.map( - async (url, i) => { - return await this.upload(url, artifactMetas[i]); - }, - ), + uploadUrls.map(async (url, i) => { + return await this.upload(url, artifactMetas[i]); + }), ); this.handleUploadErrors(results, artifactMetas); diff --git a/typegraph/node/sdk/src/tg_deploy.ts b/typegraph/node/sdk/src/tg_deploy.ts index 5bd59445fd..a0ecd3e50c 100644 --- a/typegraph/node/sdk/src/tg_deploy.ts +++ b/typegraph/node/sdk/src/tg_deploy.ts @@ -1,31 +1,42 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -import { ArtifactResolutionConfig } from "./gen/interfaces/metatype-typegraph-core.js"; +import { + FinalizeParams, + MigrationAction, +} from "./gen/interfaces/metatype-typegraph-core.js"; import { ArtifactUploader } from "./tg_artifact_upload.js"; import { TypegraphOutput } from "./typegraph.js"; import { wit_utils } from "./wit.js"; import { execRequest } from "./utils/func_utils.js"; export class BasicAuth { - constructor(public username: string, public password: string) { - } + constructor( + public username: string, + public password: string, + ) {} asHeaderValue(): string { return `Basic ${btoa(this.username + ":" + this.password)}`; } } +export interface TypegateConnectionOptions { + url: string; + auth?: BasicAuth; +} + export interface TypegraphDeployParams { + typegate: TypegateConnectionOptions; typegraphPath: string; - baseUrl: string; - auth?: BasicAuth; - artifactsConfig: ArtifactResolutionConfig; - secrets: Record; + prefix?: string; + secrets?: Record; + migrationsDir?: string; + migrationActions?: Record; + defaultMigrationAction?: MigrationAction; } export interface TypegraphRemoveParams { - baseUrl: string; - auth?: BasicAuth; + typegate: TypegateConnectionOptions; } export interface DeployResult { @@ -48,24 +59,39 @@ export async function tgDeploy( typegraph: TypegraphOutput, params: TypegraphDeployParams, ): Promise { - const { baseUrl, secrets, auth, artifactsConfig } = params; - const serialized = typegraph.serialize(artifactsConfig); + const serializeParams = { + typegraphPath: params.typegraphPath, + prefix: params.prefix, + artifactResolution: true, + codegen: false, + prismaMigration: { + migrationsDir: params.migrationsDir ?? "prisma-migrations", + migrationActions: Object.entries(params.migrationActions ?? {}), + defaultMigrationAction: params.defaultMigrationAction ?? 
{ + apply: true, + create: false, + reset: false, + }, + }, + } satisfies FinalizeParams; + const serialized = typegraph.serialize(serializeParams); const tgJson = serialized.tgJson; const refArtifacts = serialized.ref_artifacts; const headers = new Headers(); headers.append("Content-Type", "application/json"); - if (auth) { - headers.append("Authorization", auth.asHeaderValue()); + const typegate = params.typegate; + if (typegate.auth) { + headers.append("Authorization", typegate.auth.asHeaderValue()); } if (refArtifacts.length > 0) { // upload the artifacts const artifactUploader = new ArtifactUploader( - baseUrl, + params.typegate.url, refArtifacts, typegraph.name, - auth, + typegate.auth, headers, params.typegraphPath, ); @@ -73,14 +99,18 @@ export async function tgDeploy( } // deploy the typegraph - const response = await execRequest(new URL("/typegate", baseUrl), { - method: "POST", - headers, - body: wit_utils.gqlDeployQuery({ - tg: tgJson, - secrets: Object.entries(secrets ?? {}), - }), - }, `tgDeploy failed to deploy typegraph ${typegraph.name}`); + const response = await execRequest( + new URL("/typegate", typegate.url), + { + method: "POST", + headers, + body: wit_utils.gqlDeployQuery({ + tg: tgJson, + secrets: Object.entries(params.secrets ?? {}), + }), + }, + `tgDeploy failed to deploy typegraph ${typegraph.name}`, + ); if (response.errors) { for (const err of response.errors) { @@ -99,7 +129,7 @@ export async function tgRemove( typegraph: TypegraphOutput, params: TypegraphRemoveParams, ): Promise { - const { baseUrl, auth } = params; + const { url, auth } = params.typegate; const headers = new Headers(); headers.append("Content-Type", "application/json"); @@ -107,11 +137,15 @@ export async function tgRemove( headers.append("Authorization", auth.asHeaderValue()); } - const response = await execRequest(new URL("/typegate", baseUrl), { - method: "POST", - headers, - body: wit_utils.gqlRemoveQuery([typegraph.name]), - }, `tgRemove failed to remove typegraph ${typegraph.name}`); + const response = await execRequest( + new URL("/typegate", url), + { + method: "POST", + headers, + body: wit_utils.gqlRemoveQuery([typegraph.name]), + }, + `tgRemove failed to remove typegraph ${typegraph.name}`, + ); return { typegate: response }; } diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index f515e80cab..a69bfc3067 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -1,12 +1,12 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -import { ArtifactResolutionConfig } from "./gen/interfaces/metatype-typegraph-core.js"; +import { FinalizeParams } from "./gen/interfaces/metatype-typegraph-core.js"; import { BasicAuth, tgDeploy } from "./tg_deploy.js"; import { TgFinalizationResult, TypegraphOutput } from "./typegraph.js"; import { getEnvVariable } from "./utils/func_utils.js"; import { freezeTgOutput } from "./utils/func_utils.js"; -import { log } from "./log.js"; +import { log, rpc, GlobalConfig, TypegraphConfig } from "./io.js"; const PORT = "MCLI_SERVER_PORT"; // meta-cli instance that executes the current file const SELF_PATH = "MCLI_TG_PATH"; // path to the current file to uniquely identify the run results @@ -22,14 +22,15 @@ type CLIServerResponse = { type CLIConfigRequest = { typegate: { endpoint: string; - auth?: { // field not required for serialize command + auth?: { + // field not required for serialize command username: string; password: string; }; }; prefix?: string; secrets: Record; - artifactsConfig: ArtifactResolutionConfig; + artifactsConfig: FinalizeParams; disableArtifactResolution: boolean; codegen: boolean; }; @@ -46,73 +47,96 @@ type SDKResponse = { } & ({ error: T } | { data: T }); export class Manager { - #port: number; #typegraph: TypegraphOutput; - #endpoint: string; #typegraphPath: string; + static #globalConfig: GlobalConfig | null = null; + static async getGlobalConfig(): Promise { + if (Manager.#globalConfig == null) { + Manager.#globalConfig = await rpc.getGlobalConfig(); + } + return Manager.#globalConfig; + } + + static #command: Command | null = null; + static getCommand(): Command { + if (Manager.#command == null) { + Manager.#command = getEnvVariable("MCLI_ACTION") as Command; + } + return Manager.#command; + } + static isRunFromCLI(): boolean { return !!getEnvVariable(PORT); } - constructor(typegraph: TypegraphOutput, port?: number) { + public static async init(typegraph: TypegraphOutput) { + const globalConfig = await Manager.getGlobalConfig(); + const typegraphConfig = await rpc.getTypegraphConfig(typegraph.name); + return new Manager(typegraph, globalConfig, typegraphConfig); + } + + private constructor( + typegraph: TypegraphOutput, + private globalConfig: GlobalConfig, + private typegraphConfig: TypegraphConfig, + ) { this.#typegraph = typegraph; this.#typegraphPath = getEnvVariable(SELF_PATH)!; - if (port == undefined) { - const envPort = parseInt(getEnvVariable(PORT)!); - if (isNaN(envPort)) { - throw new Error( - `Environment variable ${PORT} is not a number or is undefined`, - ); - } - this.#port = envPort; - } else { - this.#port = port; - } - this.#endpoint = `http://localhost:${this.#port}`; } async run() { - const { config, command } = await this.#requestCommands(); + const command = Manager.getCommand(); + log.debug("Manager: command is", command); + + const finalizeParams = { + typegraphPath: this.#typegraphPath, + prefix: this.globalConfig.prefix ?? 
undefined, + artifactResolution: true, + codegen: false, + prismaMigration: { + migrationsDir: this.typegraphConfig.migrationsDir, + migrationActions: Object.entries(this.typegraphConfig.migrationActions), + defaultMigrationAction: this.typegraphConfig.defaultMigrationAction, + }, + } as FinalizeParams; + switch (command) { case "serialize": - await this.#serialize(config); + await this.#serialize(finalizeParams); break; case "deploy": - await this.#deploy(config); + await this.#deploy(finalizeParams); break; default: throw new Error(`command ${command} from meta-cli not supported`); } } - async #requestCommands(): Promise { - const { data: config } = await this.#requestConfig(); - // console.error("SDK received config", config); - const { data: command } = - await (await fetch(new URL("command", this.#endpoint))) - .json() as CLISuccess; - // console.error("SDK received command", command); - - return { command, config }; - } + // async #requestCommands(): Promise { + // const { data: config } = await this.#requestConfig(); + // // console.error("SDK received config", config); + // const { data: command } = + // await (await fetch(new URL("command", this.#endpoint))) + // .json() as CLISuccess; + // // console.error("SDK received command", command); + // + // return { command, config }; + // } + // + // async #requestConfig(): Promise> { + // const params = new URLSearchParams({ + // typegraph: this.#typegraph.name, + // typegraph_path: this.#typegraphPath, + // }); + // const response = await fetch(new URL("config?" + params, this.#endpoint)); + // return (await response.json()) as CLISuccess; + // } - async #requestConfig(): Promise> { - const params = new URLSearchParams({ - typegraph: this.#typegraph.name, - typegraph_path: this.#typegraphPath, - }); - const response = await fetch(new URL("config?" 
+ params, this.#endpoint)); - return (await response.json()) as CLISuccess; - } - - async #serialize(config: CLIConfigRequest): Promise { + async #serialize(config: FinalizeParams): Promise { let finalizationResult: TgFinalizationResult; try { - finalizationResult = this.#typegraph.serialize({ - ...config.artifactsConfig, - prefix: config.prefix, - }); + finalizationResult = this.#typegraph.serialize(config); } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, @@ -128,33 +152,29 @@ export class Manager { // }, // ); } - await this.#relayResultToCLI( - "serialize", - JSON.parse(finalizationResult.tgJson), - ); + + log.success(finalizationResult.tgJson, true); + // await this.#relayResultToCLI( + // "serialize", + // JSON.parse(finalizationResult.tgJson), + // ); } - async #deploy( - { typegate, artifactsConfig, secrets, prefix }: CLIConfigRequest, - ): Promise { - const { endpoint, auth } = typegate; + async #deploy(finalizeParams: FinalizeParams): Promise { + const { endpoint, auth } = this.globalConfig.typegate!; if (!auth) { throw new Error( `"${this.#typegraph.name}" received null or undefined "auth" field on the configuration`, ); } - const config = { - ...artifactsConfig, - prefix, - }; // hack for allowing tg.serialize(config) to be called more than once - const frozenOut = freezeTgOutput(config, this.#typegraph); + const frozenOut = freezeTgOutput(finalizeParams, this.#typegraph); // hack for allowing tg.serialize(config) to be called more than once let frozenSerialized: TgFinalizationResult; try { - frozenSerialized = frozenOut.serialize(config); + frozenSerialized = frozenOut.serialize(finalizeParams); } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, @@ -175,20 +195,27 @@ export class Manager { serialize: () => frozenSerialized, } as TypegraphOutput; - if (artifactsConfig.codegen) { - await this.#relayResultToCLI( - "codegen", - JSON.parse(frozenSerialized.tgJson), - ); + if (finalizeParams.codegen) { + // TODO + throw new Error("not implemented"); + // await this.#relayResultToCLI( + // "codegen", + // JSON.parse(frozenSerialized.tgJson), + // ); } try { const { response } = await tgDeploy(reusableTgOutput, { - baseUrl: endpoint, - artifactsConfig: config, - secrets, - auth: new BasicAuth(auth.username, auth.password), + typegate: { + url: endpoint, + auth: new BasicAuth(auth.username, auth.password), + }, typegraphPath: this.#typegraphPath, + prefix: finalizeParams.prefix, + secrets: this.typegraphConfig.secrets, + migrationsDir: this.typegraphConfig.migrationsDir, + migrationActions: this.typegraphConfig.migrationActions, + defaultMigrationAction: this.typegraphConfig.defaultMigrationAction, }); log.success({ typegraph: this.#typegraph.name, ...response }); @@ -201,42 +228,42 @@ export class Manager { } } - async #relayResultToCLI(initiator: Command, data: T) { - const typegraphName = this.#typegraph.name; - const response: SDKResponse = { - command: initiator, - typegraphName, - typegraphPath: this.#typegraphPath, - data, - }; - await fetch(new URL("response", this.#endpoint), { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(response), - }); - } + // async #relayResultToCLI(initiator: Command, data: T) { + // const typegraphName = this.#typegraph.name; + // const response: SDKResponse = { + // command: initiator, + // typegraphName, + // typegraphPath: this.#typegraphPath, + // data, + // }; + // await fetch(new URL("response", this.#endpoint), { + // method: "POST", + // headers: { "Content-Type": 
"application/json" }, + // body: JSON.stringify(response), + // }); + // } - async #relayErrorToCLI( - initiator: Command, - code: string, - msg: string, - value: string | any, - ) { - const typegraphName = this.#typegraph.name; - const response: SDKResponse = { - command: initiator, - typegraphName, - typegraphPath: this.#typegraphPath, - error: { - code, - msg, - value, - }, - }; - await fetch(new URL("response", this.#endpoint), { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(response), - }); - } + // async #relayErrorToCLI( + // initiator: Command, + // code: string, + // msg: string, + // value: string | any, + // ) { + // const typegraphName = this.#typegraph.name; + // const response: SDKResponse = { + // command: initiator, + // typegraphName, + // typegraphPath: this.#typegraphPath, + // error: { + // code, + // msg, + // value, + // }, + // }; + // await fetch(new URL("response", this.#endpoint), { + // method: "POST", + // headers: { "Content-Type": "application/json" }, + // body: JSON.stringify(response), + // }); + // } } diff --git a/typegraph/node/sdk/src/typegraph.ts b/typegraph/node/sdk/src/typegraph.ts index 682ab08371..3d25a66b20 100644 --- a/typegraph/node/sdk/src/typegraph.ts +++ b/typegraph/node/sdk/src/typegraph.ts @@ -14,9 +14,10 @@ import { Auth, Cors as CorsWit, Rate, wit_utils } from "./wit.js"; import { getPolicyChain } from "./types.js"; import { Artifact, - ArtifactResolutionConfig, + FinalizeParams, } from "./gen/interfaces/metatype-typegraph-core.js"; import { Manager } from "./tg_manage.js"; +import { log } from "./io.js"; type Exports = Record; @@ -34,23 +35,29 @@ interface TypegraphArgs { } export class ApplyFromArg { - constructor(public name: string | null, public type: number | null) { } + constructor( + public name: string | null, + public type: number | null, + ) {} } export class ApplyFromStatic { - constructor(public value: any) { } + constructor(public value: any) {} } export class ApplyFromSecret { - constructor(public key: string) { } + constructor(public key: string) {} } export class ApplyFromContext { - constructor(public key: string | null, public type: number | null) { } + constructor( + public key: string | null, + public type: number | null, + ) {} } export class ApplyFromParent { - constructor(public typeName: string) { } + constructor(public typeName: string) {} } const InjectionSource = { @@ -113,11 +120,11 @@ export class InheritDef { export type TypegraphBuilder = (g: TypegraphBuilderArgs) => void; export class RawAuth { - constructor(readonly jsonStr: string) { } + constructor(readonly jsonStr: string) {} } export interface TypegraphOutput { - serialize: (config: ArtifactResolutionConfig) => TgFinalizationResult; + serialize: (config: FinalizeParams) => TgFinalizationResult; name: string; } @@ -126,13 +133,13 @@ export interface TgFinalizationResult { ref_artifacts: Artifact[]; } +let counter = 0; + export async function typegraph( name: string, builder: TypegraphBuilder, ): Promise; -export async function typegraph( - args: TypegraphArgs, -): Promise; +export async function typegraph(args: TypegraphArgs): Promise; export async function typegraph( args: Omit, builder: TypegraphBuilder, @@ -141,21 +148,13 @@ export async function typegraph( nameOrArgs: string | TypegraphArgs | Omit, maybeBuilder?: TypegraphBuilder, ): Promise { - const args = typeof nameOrArgs === "string" - ? { name: nameOrArgs } - : nameOrArgs; + ++counter; + const args = + typeof nameOrArgs === "string" ? 
{ name: nameOrArgs } : nameOrArgs; - const { - name, - dynamic, - cors, - prefix, - rate, - secrets, - } = args; - const builder = "builder" in args - ? args.builder as TypegraphBuilder - : maybeBuilder!; + const { name, dynamic, cors, prefix, rate, secrets } = args; + const builder = + "builder" in args ? (args.builder as TypegraphBuilder) : maybeBuilder!; const file = caller(); if (!file) { @@ -163,11 +162,7 @@ export async function typegraph( } // node/deno compat tick until MET-236 is landed const simpleFile = file.replace(/:[0-9]+$/, "").replace(/^file:\/\//, ""); - const path = dirname( - fromFileUrl( - `file://${simpleFile}`, - ), - ); + const path = dirname(fromFileUrl(`file://${simpleFile}`)); const defaultCorsFields = { allowCredentials: true, @@ -222,7 +217,8 @@ export async function typegraph( builder(g); const ret = { - serialize(config: ArtifactResolutionConfig) { + serialize(config: FinalizeParams) { + log.debug("finalizeParams", config); try { const [tgJson, ref_artifacts] = core.finalizeTypegraph(config); const result: TgFinalizationResult = { @@ -243,9 +239,21 @@ export async function typegraph( } as TypegraphOutput; if (Manager.isRunFromCLI()) { - const manager = new Manager(ret); + log.debug("creating Manager"); + const manager = await Manager.init(ret); + log.debug("running Manager"); await manager.run(); + log.debug("done"); + + // TODO solve hanging process (stdin??) + setTimeout(() => { + if (counter === 0) { + log.debug("exiting"); + process.exit(0); + } + }, 10); } + --counter; return ret; } diff --git a/typegraph/node/sdk/src/utils/func_utils.ts b/typegraph/node/sdk/src/utils/func_utils.ts index aa4b3e545d..1c1dacf1d8 100644 --- a/typegraph/node/sdk/src/utils/func_utils.ts +++ b/typegraph/node/sdk/src/utils/func_utils.ts @@ -8,8 +8,8 @@ import { } from "../typegraph.js"; import { ReducePath } from "../gen/interfaces/metatype-typegraph-utils.js"; import { serializeStaticInjection } from "./injection_utils.js"; -import { ArtifactResolutionConfig } from "../gen/interfaces/metatype-typegraph-core.js"; -import { log } from "../log.js"; +import { FinalizeParams } from "../gen/interfaces/metatype-typegraph-core.js"; +import { log } from "../io.js"; export function stringifySymbol(symbol: symbol) { const name = symbol.toString().match(/\((.+)\)/)?.[1]; @@ -31,9 +31,7 @@ export function buildReduceData( currPath: string[] = [], ): ReducePath[] { if (node === null || node === undefined) { - throw new Error( - `unsupported value "${node}" at ${currPath.join(".")}`, - ); + throw new Error(`unsupported value "${node}" at ${currPath.join(".")}`); } if (node instanceof InheritDef) { paths.push({ @@ -65,9 +63,7 @@ export function buildReduceData( }); return paths; } - throw new Error( - `unsupported type "${typeof node}" at ${currPath.join(".")}`, - ); + throw new Error(`unsupported type "${typeof node}" at ${currPath.join(".")}`); } export function getEnvVariable( @@ -90,11 +86,11 @@ const frozenMemo: Record = {}; /** Create a reusable version of a `TypegraphOutput` */ export function freezeTgOutput( - config: ArtifactResolutionConfig, + config: FinalizeParams, tgOutput: TypegraphOutput, ): TypegraphOutput { - frozenMemo[tgOutput.name] = frozenMemo[tgOutput.name] ?? - tgOutput.serialize(config); + frozenMemo[tgOutput.name] = + frozenMemo[tgOutput.name] ?? 
tgOutput.serialize(config); return { ...tgOutput, serialize: () => frozenMemo[tgOutput.name], @@ -113,7 +109,9 @@ export async function execRequest( const response = await fetch(url, reqInit); if (!response.ok) { log.debug("error", response.json()); - throw Error(`${errMsg}: request failed with status ${response.status} (${response.statusText})`) + throw Error( + `${errMsg}: request failed with status ${response.status} (${response.statusText})`, + ); } if (response.headers.get("Content-Type") == "application/json") { diff --git a/website/docs/reference/typegate/authentication/index.mdx b/website/docs/reference/typegate/authentication/index.mdx index 38f8b17a5e..a50602aeb3 100644 --- a/website/docs/reference/typegate/authentication/index.mdx +++ b/website/docs/reference/typegate/authentication/index.mdx @@ -15,9 +15,7 @@ Basic authentication is the simplest way to authenticate requests. It is done by | Secrets | `BASIC_[username]=password` | | Header | `Authorization: Basic base64(username:password)` | | Context | `{ username }` | - - + Date: Thu, 6 Jun 2024 08:08:28 +0300 Subject: [PATCH 10/35] feat: task source, task queue, task ref --- examples/metatype.yaml | 4 + .../func-gql/db/migration_lock.toml | 2 +- .../database/migration_lock.toml | 2 +- examples/typegraphs/math.ts | 4 - meta-cli/src/cli/deploy.rs | 459 ++++-------------- meta-cli/src/cli/serialize.rs | 42 +- meta-cli/src/deploy/actors/discovery.rs | 11 +- meta-cli/src/deploy/actors/task.rs | 11 +- meta-cli/src/deploy/actors/task/action.rs | 10 +- meta-cli/src/deploy/actors/task/deploy.rs | 58 ++- .../task/deploy/migration_resolution.rs | 2 - .../deploy/actors/task/deploy/migrations.rs | 277 ++++++----- meta-cli/src/deploy/actors/task/serialize.rs | 56 ++- meta-cli/src/deploy/actors/task_manager.rs | 354 ++++++++++---- meta-cli/src/deploy/actors/watcher.rs | 34 +- meta-cli/src/deploy/push/pusher.rs | 201 -------- meta-cli/src/typegraph/loader/discovery.rs | 2 + typegate/src/typegate/mod.ts | 21 +- typegraph/node/sdk/src/io.ts | 107 ---- typegraph/node/sdk/src/tg_artifact_upload.ts | 7 +- typegraph/node/sdk/src/tg_manage.ts | 3 +- typegraph/node/sdk/src/typegraph.ts | 14 +- typegraph/node/sdk/src/utils/func_utils.ts | 4 +- 23 files changed, 644 insertions(+), 1041 deletions(-) delete mode 100644 meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs diff --git a/examples/metatype.yaml b/examples/metatype.yaml index bc049975cc..25558bb2da 100644 --- a/examples/metatype.yaml +++ b/examples/metatype.yaml @@ -150,6 +150,10 @@ typegraphs: python: exclude: - "**/*" + - "typegraphs/temporal.py" + deno: + exclude: + - "typegraphs/temporal.ts" metagen: targets: diff --git a/examples/migrations/func-gql/db/migration_lock.toml b/examples/migrations/func-gql/db/migration_lock.toml index fbffa92c2b..99e4f20090 100644 --- a/examples/migrations/func-gql/db/migration_lock.toml +++ b/examples/migrations/func-gql/db/migration_lock.toml @@ -1,3 +1,3 @@ # Please do not edit this file manually # It should be added in your version-control system (i.e. 
Git) -provider = "postgresql" \ No newline at end of file +provider = "postgresql" diff --git a/examples/migrations/quick-start-project/database/migration_lock.toml b/examples/migrations/quick-start-project/database/migration_lock.toml index fbffa92c2b..99e4f20090 100644 --- a/examples/migrations/quick-start-project/database/migration_lock.toml +++ b/examples/migrations/quick-start-project/database/migration_lock.toml @@ -1,3 +1,3 @@ # Please do not edit this file manually # It should be added in your version-control system (i.e. Git) -provider = "postgresql" \ No newline at end of file +provider = "postgresql" diff --git a/examples/typegraphs/math.ts b/examples/typegraphs/math.ts index ccd3f97004..321b15a534 100644 --- a/examples/typegraphs/math.ts +++ b/examples/typegraphs/math.ts @@ -3,8 +3,6 @@ import { Policy, t, typegraph } from "@typegraph/sdk/index.js"; // skip:end import { DenoRuntime } from "@typegraph/sdk/runtimes/deno.js"; -console.log("debug: debug test"); - await typegraph( { name: "math", @@ -56,5 +54,3 @@ await typegraph( ); }, ); - -console.log("debug: end of math.ts"); diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 9165ee1ea8..018dabde99 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -3,13 +3,12 @@ use self::actors::task::deploy::{DeployAction, DeployActionGenerator}; use self::actors::task::TaskConfig; -use self::actors::task_manager::{self, StopReason, TaskReason}; +use self::actors::task_manager::{self, StopReason}; use super::{Action, ConfigArgs, NodeArgs}; use crate::com::store::{Command, Endpoint, ServerStore}; use crate::config::Config; use crate::deploy::actors; use crate::deploy::actors::console::ConsoleActor; -use crate::deploy::actors::discovery::DiscoveryActor; use crate::deploy::actors::task_manager::TaskManager; use crate::deploy::actors::watcher::{self, WatcherActor}; use crate::interlude::*; @@ -17,7 +16,6 @@ use crate::secrets::{RawSecrets, Secrets}; use actix_web::dev::ServerHandle; use clap::Parser; use common::node::Node; -use futures::channel::oneshot; use owo_colors::OwoColorize; #[derive(Parser, Debug)] @@ -97,7 +95,7 @@ pub struct Deploy { base_dir: Arc, options: DeployOptions, secrets: RawSecrets, - file: Option>, + file: Option, max_parallel_loads: Option, } @@ -152,14 +150,14 @@ impl Deploy { base_dir: dir.clone(), options, secrets, - file: file.map(|path| path.into()), + file: file.clone(), max_parallel_loads: deploy.max_parallel_loads, }) } } struct CtrlCHandlerData { - watcher: Addr, + watcher: Addr>, task_manager: Addr>, } @@ -198,9 +196,7 @@ impl Action for DeploySubcommand { } else { trace!("running in default mode"); // deploy a single file - let deploy = default_mode::DefaultMode::init(deploy).await?; - let status = deploy.run().await?; - + let status = default_mode::run(deploy).await?; server_handle.unwrap().stop(true).await; status @@ -221,198 +217,86 @@ enum ExitStatus { mod default_mode { //! 
non-watch mode - use crate::{cli::deploy::default_mode::actors::task::TaskFinishStatus, config::PathOption}; + use task_manager::{TaskManagerInit, TaskSource}; + + use crate::config::PathOption; use self::actors::task::deploy::MigrationAction; use super::*; - pub struct DefaultMode { - deploy: Deploy, - console: Addr, - task_manager: Addr>, - report_rx: oneshot::Receiver>, - // loader_event_rx: mpsc::UnboundedReceiver, - } + pub async fn run(deploy: Deploy) -> Result { + let console = ConsoleActor::new(Arc::clone(&deploy.config)).start(); - impl DefaultMode { - pub async fn init(deploy: Deploy) -> Result { - let console = ConsoleActor::new(Arc::clone(&deploy.config)).start(); - - let mut secrets = deploy.secrets.clone(); - secrets.apply_overrides(&deploy.options.secrets)?; - - // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); - - // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); - - let (report_tx, report_rx) = oneshot::channel(); - - let task_config = TaskConfig::init(deploy.base_dir.clone()); - let action_generator = DeployActionGenerator { - task_config: task_config.into(), - node: deploy.node.clone().into(), - secrets: secrets.hydrate(deploy.base_dir.clone()).await?.into(), - migrations_dir: deploy - .config - .prisma_migrations_base_dir(PathOption::Absolute) - .into(), - default_migration_action: MigrationAction { - apply: true, - create: deploy.options.create_migration, - reset: deploy.options.allow_destructive, - }, - }; - - let task_manager = TaskManager::new( - deploy.config.clone(), - action_generator, - deploy.max_parallel_loads.unwrap_or_else(num_cpus::get), - report_tx, - console.clone(), - ) - .auto_stop() - .start(); - - Ok(Self { - deploy, - console, - task_manager, - report_rx, - }) - } + let mut secrets = deploy.secrets.clone(); + secrets.apply_overrides(&deploy.options.secrets)?; - pub async fn run(self) -> Result { - debug!(file = ?self.deploy.file); + let task_config = TaskConfig::init(deploy.base_dir.clone()); + let action_generator = DeployActionGenerator { + task_config: task_config.into(), + node: deploy.node.clone().into(), + secrets: secrets.hydrate(deploy.base_dir.clone()).await?.into(), + migrations_dir: deploy + .config + .prisma_migrations_base_dir(PathOption::Absolute) + .into(), + default_migration_action: MigrationAction { + apply: true, + create: deploy.options.create_migration, + reset: deploy.options.allow_destructive, + }, + }; - { - let task_manager = self.task_manager.clone(); - ctrlc::set_handler(move || { - debug!("CTRL-C handler"); - task_manager.do_send(task_manager::message::Stop); - // loader.do_send(loader::TryStop(StopBehavior::ExitSuccess)); - }) - } - .context("setting Ctrl-C handler")?; - let _discovery = if let Some(path) = self.deploy.file.clone() { - self.task_manager.do_send(task_manager::message::AddTask { - path, - reason: TaskReason::Discovery, - }); - // self.loader.do_send(LoadModule(file.to_path_buf().into())); - None + let mut init = TaskManagerInit::::new( + deploy.config.clone(), + action_generator, + console.clone(), + if let Some(file) = &deploy.file { + TaskSource::Static(vec![file.clone()]) } else { - Some( - DiscoveryActor::new( - Arc::clone(&self.deploy.config), - self.task_manager.clone(), - self.console.clone(), - Arc::clone(&self.deploy.base_dir), - ) - .start(), - ) - }; - - let report = self.report_rx.await?; - let summary = report.summary(); - println!("Result:\n{}", summary.text); - - match report.stop_reason { - StopReason::Natural => { - if summary.success { - 
Ok(ExitStatus::Success) - } else { - Ok(ExitStatus::Failure) - } - } - StopReason::Restart => { - unreachable!("TaskManager should not restart on the default mode") + TaskSource::Discovery(deploy.base_dir) + }, + ) + .max_retry_count(3); + + if let Some(max_parallel_loads) = deploy.max_parallel_loads { + init = init.max_parallel_tasks(max_parallel_loads); + } + let report = init.run().await; + + let summary = report.summary(); + println!("Result:\n{}", summary.text); + + match report.stop_reason { + StopReason::Natural => { + if summary.success { + Ok(ExitStatus::Success) + } else { + Ok(ExitStatus::Failure) } - StopReason::Manual => { - if summary.success { - Ok(ExitStatus::Success) - } else { - Ok(ExitStatus::Failure) - } - } // TODO read report - StopReason::ManualForced => Ok(ExitStatus::Failure), - StopReason::Error => { - // error should have already been reported + } + StopReason::Restart => { + unreachable!("TaskManager should not restart on the default mode") + } + StopReason::Manual => { + if summary.success { + Ok(ExitStatus::Success) + } else { Ok(ExitStatus::Failure) } + } // TODO read report + StopReason::ManualForced => Ok(ExitStatus::Failure), + StopReason::Error => { + // error should have already been reported + Ok(ExitStatus::Failure) } } - - // #[tracing::instrument(skip(self))] - // fn handle_loaded_typegraphs(self) -> oneshot::Receiver> { - // let mut event_rx = self.loader_event_rx; - // let console = self.console.clone(); - // let (tx, rx) = oneshot::channel(); - // let fut = async move { - // let mut errors = vec![]; - // while let Some(event) = event_rx.recv().await { - // match event { - // LoaderEvent::Typegraph(tg_infos) => { - // let responses = match tg_infos.get_responses_or_fail() { - // Ok(val) => val, - // Err(err) => { - // console.error(format!( - // "failed pushing typegraph at {:?}: {err:#}", - // tg_infos.path.display().cyan(), - // )); - // errors.push((tg_infos.path.clone(), err)); - // continue; - // } - // }; - // for (name, res) in responses.iter() { - // match PushResult::new( - // self.console.clone(), - // self.loader.clone(), - // res.clone(), - // ) { - // Ok(push) => push.finalize().await.unwrap(), - // Err(err) => { - // console.error(format!( - // "failed pushing typegraph {:?} at {:?}: {err:#}", - // name.yellow(), - // tg_infos.path.display().cyan(), - // )); - // errors.push((tg_infos.path.clone(), err)); - // } - // } - // } - // } - // LoaderEvent::Stopped(b) => { - // if let StopBehavior::ExitFailure(msg) = b { - // error!("LoaderActor exit failure: {}", msg.red()); - // } - // } - // } - // } - // trace!("typegraph channel closed."); - // if errors.is_empty() { - // tx.send(Ok(())).unwrap_or_log(); - // } else { - // tx.send(Err(errors.into_iter().fold( - // ferr!("loader encountered errors").suppress_backtrace(true), - // |report, (path, err)| { - // report.section( - // format!("{}", format!("{err:#}").red()) - // .header(format!("{}:", path.display().purple())), - // ) - // }, - // ))) - // .unwrap_or_log(); - // } - // // pusher address will be dropped when both loops are done - // }; - // Arbiter::current().spawn(fut.in_current_span()); - // rx - // } } } mod watch_mode { + use task_manager::{TaskManagerInit, TaskSource}; + use crate::config::PathOption; use self::actors::task::deploy::MigrationAction; @@ -421,6 +305,10 @@ mod watch_mode { #[tracing::instrument] pub async fn enter_watch_mode(deploy: Deploy) -> Result<()> { + if let Some(_) = &deploy.file { + bail!("Cannot use --file in watch mode"); + } + let console = 
ConsoleActor::new(Arc::clone(&deploy.config)).start(); let ctrlc_handler_data = Arc::new(std::sync::Mutex::new(None)); @@ -458,190 +346,37 @@ mod watch_mode { }, }; - loop { - // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); - - // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); - - let (report_tx, report_rx) = oneshot::channel(); - - let task_manager = TaskManager::new( - deploy.config.clone(), - action_generator.clone(), - deploy.max_parallel_loads.unwrap_or_else(num_cpus::get), - report_tx, - console.clone(), - ) - .start(); - - let _discovery = DiscoveryActor::new( - Arc::clone(&deploy.config), - task_manager.clone(), - console.clone(), - Arc::clone(&deploy.base_dir), - ) - .start(); - - let watcher = WatcherActor::new( - Arc::clone(&deploy.config), - deploy.base_dir.clone(), - task_manager.clone(), - console.clone(), - )? - .start(); - - let actor_system = ActorSystem { - console: console.clone(), - watcher, - task_manager: task_manager.clone(), - }; + // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); - // actor_system.handle_loaded_typegraphs(loader_event_rx); - // actor_system.handle_watch_events(watch_event_rx); - actor_system.update_ctrlc_handler(ctrlc_handler_data.clone()); + // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); - let report = report_rx.await?; + let mut init = TaskManagerInit::::new( + deploy.config.clone(), + action_generator.clone(), + console.clone(), + TaskSource::DiscoveryAndWatch(deploy.base_dir), + ) + .max_retry_count(3); - match report.stop_reason { - StopReason::Natural => { - unreachable!("TaskManager should not stop naturally on watch mode") - } - StopReason::Restart => { - continue; - } - StopReason::Manual => { - return Err(eyre::eyre!("tasks manually stopped")); - } - StopReason::ManualForced => { - return Err(eyre::eyre!("tasks manually stopped (forced)")); - } - StopReason::Error => return Err(eyre::eyre!("failed")), - } + if let Some(max_parallel_loads) = deploy.max_parallel_loads { + init = init.max_parallel_tasks(max_parallel_loads); } - } + let report = init.run().await; - struct ActorSystem { - console: Addr, - watcher: Addr, - task_manager: Addr>, - } - - impl ActorSystem { - // #[tracing::instrument(skip(self))] - // fn handle_loaded_typegraphs(&self, event_rx: mpsc::UnboundedReceiver) { - // let console = self.console.clone(); - // let loader = self.loader.clone(); - // let fut = async move { - // let mut event_rx = event_rx; - // while let Some(event) = event_rx.recv().await { - // match event { - // LoaderEvent::Typegraph(tg_infos) => { - // let responses = ServerStore::get_responses_or_fail(&tg_infos.path) - // .unwrap_or_log() - // .as_ref() - // .to_owned(); - // for (name, response) in responses.into_iter() { - // match PushResult::new(console.clone(), loader.clone(), response) { - // Ok(push) => { - // if let Err(err) = push.finalize().await { - // panic!("{err:#}"); - // } - // RetryManager::clear_counter(&tg_infos.path); - // } - // Err(err) => { - // let tg_path = tg_infos.path.clone(); - // console.error(format!( - // "failed pushing typegraph {name:?} at {tg_path:?}: {err:#}", - // )); - // if let Some(delay) = RetryManager::next_delay(&tg_path) { - // console.info(format!( - // "retry {}/{}, retrying after {}s of {:?}", - // delay.retry, - // delay.max, - // delay.duration.as_secs(), - // tg_path.display(), - // )); - // tokio::time::sleep(delay.duration).await; - // loader.do_send(LoadModule(Arc::new(tg_path))); - // } - // } - // 
} - // } - // } - // LoaderEvent::Stopped(b) => { - // if let StopBehavior::ExitFailure(msg) = b { - // panic!("{msg}"); - // } - // } - // } - // } - // trace!("Typegraph channel closed."); - // // pusher address will be dropped when both loops are done - // }; - // Arbiter::current().spawn(fut.in_current_span()); - // } - - // #[tracing::instrument(skip(self))] - // fn handle_watch_events( - // &self, - // watch_event_rx: mpsc::UnboundedReceiver, - // ) { - // let console = self.console.clone(); - // let watcher = self.watcher.clone(); - // let loader = self.loader.clone(); - // let fut = async move { - // let mut watch_event_rx = watch_event_rx; - // while let Some(event) = watch_event_rx.recv().await { - // use actors::watcher::Event as E; - // match event { - // E::ConfigChanged => { - // RetryManager::reset(); - // - // console.warning("metatype configuration file changed".to_string()); - // console.warning("reloading everything".to_string()); - // - // loader.do_send(loader::TryStop(StopBehavior::Restart)); - // watcher.do_send(actors::watcher::Stop); - // } - // E::TypegraphModuleChanged { typegraph_module } => { - // RetryManager::clear_counter(&typegraph_module); - // loader.do_send(ReloadModule( - // typegraph_module.into(), - // ReloadReason::FileChanged, - // )); - // } - // E::TypegraphModuleDeleted { typegraph_module } => { - // RetryManager::clear_counter(&typegraph_module); - // - // // TODO internally by the watcher?? - // watcher.do_send(actors::watcher::RemoveTypegraph( - // typegraph_module.clone(), - // )); - // // TODO delete typegraph in typegate?? - // } - // E::DependencyChanged { - // typegraph_module, - // dependency_path, - // } => { - // RetryManager::clear_counter(&typegraph_module); - // - // loader.do_send(ReloadModule( - // typegraph_module.into(), - // ReloadReason::DependencyChanged(dependency_path), - // )); - // } - // } - // } - // trace!("watcher event channel closed"); - // }; - // Arbiter::current().spawn(fut.in_current_span()); - // } - - fn update_ctrlc_handler(&self, data: Arc>>) { - *data.lock().unwrap() = Some(CtrlCHandlerData { - watcher: self.watcher.clone(), - task_manager: self.task_manager.clone(), - }); + match report.stop_reason { + StopReason::Natural => { + unreachable!("TaskManager should not stop naturally on watch mode") + } + StopReason::Restart => { + unreachable!("Restarting should not stop the TaskManager") + } + StopReason::Manual => { + return Err(eyre::eyre!("tasks manually stopped")); + } + StopReason::ManualForced => { + return Err(eyre::eyre!("tasks manually stopped (forced)")); + } + StopReason::Error => return Err(eyre::eyre!("failed")), } } } diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index ebec4a304d..76ac2c986f 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -1,27 +1,21 @@ -use crate::deploy::actors::task::serialize::{ - SerializeAction, SerializeActionGenerator, SerializeError, -}; -use crate::deploy::actors::task::{TaskConfig, TaskFinishStatus}; -use crate::deploy::actors::task_manager::message::AddTask; -use crate::deploy::actors::task_manager::{TaskManager, TaskReason}; // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; use super::{Action, ConfigArgs}; use crate::com::store::{Command, ServerStore}; use crate::config::Config; use crate::deploy::actors::console::ConsoleActor; -use crate::deploy::actors::loader::{LoadModule, LoaderActor, LoaderEvent, StopBehavior}; -use actix::prelude::*; +use crate::deploy::actors::task::serialize::{ + SerializeAction, SerializeActionGenerator, SerializeError, +}; +use crate::deploy::actors::task::{TaskConfig, TaskFinishStatus}; +use crate::deploy::actors::task_manager::{TaskManagerInit, TaskSource}; +use crate::interlude::*; use actix_web::dev::ServerHandle; use clap::Parser; -use common::typegraph::Typegraph; use core::fmt::Debug; -use futures::channel::oneshot; use std::io::{self, Write}; use tokio::io::AsyncWriteExt; -use tokio::sync::mpsc; #[derive(Parser, Debug)] pub struct Serialize { @@ -79,27 +73,25 @@ impl Action for Serialize { let console = ConsoleActor::new(Arc::clone(&config)).start(); - let (report_tx, report_rx) = oneshot::channel(); - let action_generator = SerializeActionGenerator::new(TaskConfig::init(args.dir().into())); - // TODO fail_fast - let task_manager: Addr> = - TaskManager::new(config.clone(), action_generator, 1, report_tx, console) - .auto_stop() - .start(); if self.files.is_empty() { bail!("no file provided"); } - for path in self.files.iter() { - task_manager.do_send(AddTask { - path: path.as_path().into(), - reason: TaskReason::Discovery, - }); + // TODO fail_fast + let mut init = TaskManagerInit::::new( + config.clone(), + action_generator, + console, + TaskSource::Static(self.files.clone()), + ); + if let Some(max_parallel_tasks) = self.max_parallel_loads { + init = init.max_parallel_tasks(max_parallel_tasks); } - let report = report_rx.await?; + let report = init.run().await; + // TODO no need to report errors let tgs = report .entries diff --git a/meta-cli/src/deploy/actors/discovery.rs b/meta-cli/src/deploy/actors/discovery.rs index fc801c9254..78ef505480 100644 --- a/meta-cli/src/deploy/actors/discovery.rs +++ b/meta-cli/src/deploy/actors/discovery.rs @@ -10,10 +10,11 @@ use crate::{config::Config, typegraph::loader::Discovery}; use super::console::{Console, ConsoleActor}; use super::task::action::TaskAction; -use super::task_manager::TaskManager; +use super::task_manager::{TaskGenerator, TaskManager}; pub struct DiscoveryActor { config: Arc, + task_generator: TaskGenerator, task_manager: Addr>, console: Addr, directory: Arc, @@ -22,12 +23,14 @@ pub struct DiscoveryActor { impl DiscoveryActor { pub fn new( config: Arc, + task_generator: TaskGenerator, task_manager: Addr>, console: Addr, directory: Arc, ) -> Self { Self { config, + task_generator, task_manager, console, directory, @@ -44,20 +47,22 @@ impl Actor for DiscoveryActor { #[tracing::instrument(skip(self))] fn started(&mut self, ctx: &mut Self::Context) { - log::trace!("DiscoveryActor started"); + log::trace!("DiscoveryActor started; directory={:?}", self.directory); let config = Arc::clone(&self.config); let dir = self.directory.clone(); let task_manager = self.task_manager.clone(); let console = self.console.clone(); let discovery = ctx.address(); + let task_generator = self.task_generator.clone(); + let fut = async move { match Discovery::new(config, dir.to_path_buf()) .start(|path| match path { Ok(path) => { let rel_path = diff_paths(&path, &dir).unwrap(); task_manager.do_send(task_manager::message::AddTask { - path: rel_path.into(), + task_ref: task_generator.generate(rel_path.into(), 0), reason: 
TaskReason::Discovery, }); } diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 2f867b7cfc..98a3c21a70 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -158,11 +158,11 @@ where } fn get_path(&self) -> &Path { - self.action.get_path() + &self.action.get_task_ref().path } fn get_path_owned(&self) -> Arc { - self.action.get_path_owned() + self.action.get_task_ref().path.clone() } } @@ -472,8 +472,8 @@ impl Handler> for TaskActor { std::mem::swap(res, &mut self.collected_output); } self.task_manager - .do_send(task_manager::message::UpdateTaskStatus::Finished { - path: self.get_path_owned(), + .do_send(task_manager::message::TaskFinished { + task_ref: self.action.get_task_ref().clone(), status: message.0, }); ctx.stop(); @@ -516,14 +516,11 @@ impl Handler for TaskActor { impl TaskActor { fn send_rpc_response(&mut self, response: RpcResponse, ctx: &mut Context) { - let response_id = response.id; match serde_json::to_string(&response) { Ok(response) => { let stdin = self.process_stdin.clone().unwrap(); - let console = self.console.clone(); let fut = async move { let mut stdin = stdin.lock().await; - console.debug(format!("sending rpc response #{response_id}")); stdin .write_all(response.as_bytes()) .await diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index cf2aadfdd5..1597a243e8 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -2,20 +2,19 @@ // SPDX-License-Identifier: MPL-2.0 use super::TaskActor; -use crate::deploy::actors::task_manager::TaskManager; +use crate::deploy::actors::task_manager::{TaskManager, TaskRef}; use crate::interlude::*; use crate::{config::Config, deploy::actors::console::ConsoleActor}; use std::{path::Path, sync::Arc}; -use tokio::{process::Command, sync::OwnedSemaphorePermit}; +use tokio::process::Command; pub trait TaskActionGenerator: Clone { type Action: TaskAction; fn generate( &self, - path: Arc, + task_ref: TaskRef, followup: Option<::Followup>, - permit: OwnedSemaphorePermit, ) -> Self::Action; } @@ -41,8 +40,7 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { type Generator: TaskActionGenerator + Unpin; async fn get_command(&self) -> Result; - fn get_path(&self) -> &Path; - fn get_path_owned(&self) -> Arc; + fn get_task_ref(&self) -> &TaskRef; fn get_start_message(&self) -> String; fn get_error_message(&self, err: &str) -> String; diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index aa6a8623b1..b756547e85 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -1,3 +1,6 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+// SPDX-License-Identifier: MPL-2.0 + mod migration_resolution; mod migrations; @@ -8,14 +11,14 @@ use super::action::{ use super::command::CommandBuilder; use super::TaskConfig; use crate::deploy::actors::console::Console; -use crate::deploy::actors::task_manager::TaskManager; +use crate::deploy::actors::task_manager::{TaskManager, TaskRef}; use crate::interlude::*; use crate::secrets::Secrets; use color_eyre::owo_colors::OwoColorize; use common::node::Node; use serde::Deserialize; use std::{path::Path, sync::Arc}; -use tokio::{process::Command, sync::OwnedSemaphorePermit}; +use tokio::process::Command; pub type DeployAction = Arc; @@ -34,15 +37,13 @@ pub struct PrismaRuntimeId { #[derive(Debug)] pub struct DeployActionInner { - path: Arc, + task_ref: TaskRef, task_config: Arc, node: Arc, secrets: Arc, migrations_dir: Arc, migration_actions: HashMap, default_migration_action: MigrationAction, - #[allow(unused)] - permit: OwnedSemaphorePermit, } #[derive(Clone)] @@ -57,12 +58,7 @@ pub struct DeployActionGenerator { impl TaskActionGenerator for DeployActionGenerator { type Action = DeployAction; - fn generate( - &self, - path: Arc, - followup: Option, - permit: OwnedSemaphorePermit, - ) -> Self::Action { + fn generate(&self, task_ref: TaskRef, followup: Option) -> Self::Action { let (default_migration_action, migration_actions) = if let Some(followup) = followup { ( Default::default(), @@ -88,14 +84,13 @@ impl TaskActionGenerator for DeployActionGenerator { }; DeployActionInner { - path, + task_ref, task_config: self.task_config.clone(), node: self.node.clone(), secrets: self.secrets.clone(), migrations_dir: self.migrations_dir.clone(), migration_actions, default_migration_action, - permit, } .into() } @@ -162,7 +157,11 @@ impl TaskAction for DeployAction { async fn get_command(&self) -> Result { CommandBuilder { - path: self.task_config.base_dir.to_path_buf().join(&self.path), + path: self + .task_config + .base_dir + .to_path_buf() + .join(&self.task_ref.path), task_config: self.task_config.clone(), action_env: "deploy", } @@ -170,23 +169,18 @@ impl TaskAction for DeployAction { .await } - fn get_path(&self) -> &Path { - return &self.path; - } - - fn get_path_owned(&self) -> Arc { - return self.path.clone(); - } - fn get_start_message(&self) -> String { - format!("starting deployment process for {:?}", self.path) + format!( + "starting deployment process for {:?}", + self.task_ref.path.display().yellow() + ) } fn get_error_message(&self, err: &str) -> String { format!( - "{icon} failed to deploy typegraph(s) from {path:?}: {err}", + "{icon} failed to deploy typegraph(s) from {path}: {err}", icon = "✗".red(), - path = self.path, + path = self.task_ref.path.display().yellow(), err = err, ) } @@ -194,7 +188,7 @@ impl TaskAction for DeployAction { fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext) { match res { Ok(data) => { - let scope = format!("({path})", path = self.path.display()); + let scope = format!("({path})", path = self.task_ref.path.display()); let scope = scope.yellow(); for message in &data.messages { @@ -217,7 +211,7 @@ impl TaskAction for DeployAction { "{icon} error while deploying typegraph {name} from {path}", icon = "✗".red(), name = tg_name.cyan(), - path = self.path.display().yellow(), + path = self.task_ref.path.display().yellow(), )); self.handle_push_failure(&tg_name, failure, &ctx, &scope); @@ -227,7 +221,7 @@ impl TaskAction for DeployAction { "{icon} successfully deployed typegraph {name} from {path}", icon = "✓".green(), name = tg_name.cyan(), - 
path = self.path.display().yellow(), + path = self.task_ref.path.display().yellow(), )); } } @@ -235,10 +229,10 @@ impl TaskAction for DeployAction { Err(data) => { ctx.console.error(format!( - "{icon} failed to deploy typegraph {name} from {path:?}: {err}", + "{icon} failed to deploy typegraph {name} from {path}: {err}", icon = "✗".red(), name = data.get_typegraph_name().cyan(), - path = self.path, + path = self.task_ref.path.display().yellow(), err = data.error, )); } @@ -276,6 +270,10 @@ impl TaskAction for DeployAction { "migrationsDir": self.migrations_dir.to_path_buf().join(typegraph), }) } + + fn get_task_ref(&self) -> &crate::deploy::actors::task_manager::TaskRef { + &self.task_ref + } } impl FollowupTaskConfig for FollowupDeployConfig { diff --git a/meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs b/meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs deleted file mode 100644 index 139597f9cb..0000000000 --- a/meta-cli/src/deploy/actors/task/deploy/migration_resolution.rs +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/meta-cli/src/deploy/actors/task/deploy/migrations.rs b/meta-cli/src/deploy/actors/task/deploy/migrations.rs index ccd36a38e6..ed90cc780d 100644 --- a/meta-cli/src/deploy/actors/task/deploy/migrations.rs +++ b/meta-cli/src/deploy/actors/task/deploy/migrations.rs @@ -1,10 +1,13 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + use color_eyre::owo_colors::OwoColorize; use super::{DeployAction, DeployActionInner, Migration, MigrationActionOverride, PrismaRuntimeId}; +use crate::deploy::actors::console::input::{Confirm, ConfirmHandler, Select}; use crate::deploy::actors::console::Console; -use crate::deploy::actors::console::input::{ConfirmHandler, Confirm, Select}; -use crate::deploy::actors::task::TaskActor; use crate::deploy::actors::task::action::ActionFinalizeContext; +use crate::deploy::actors::task::TaskActor; use crate::interlude::*; #[derive(Deserialize, Debug, Clone)] @@ -101,16 +104,15 @@ impl DeployActionInner { } Ok(PushFailure::NullConstraintViolation(error)) => { - ctx.task - .do_send(message::ResolveConstraintViolation { - typegraph: tg_name.to_string(), - runtime: error.runtime_name.clone(), - column: error.column.clone(), - migration: error.migration_name.clone(), - is_new_column: error.is_new_column, - table: error.table.clone(), - message: error.message.clone(), - }); + ctx.task.do_send(message::ResolveConstraintViolation { + typegraph: tg_name.to_string(), + runtime: error.runtime_name.clone(), + column: error.column.clone(), + migration: error.migration_name.clone(), + is_new_column: error.is_new_column, + table: error.table.clone(), + message: error.message.clone(), + }); } Err(err) => { @@ -144,7 +146,7 @@ pub mod message { #[derive(Message)] #[rtype(result = "()")] - pub (super) struct ResolveConstraintViolation { + pub(super) struct ResolveConstraintViolation { pub typegraph: String, pub runtime: String, pub column: String, @@ -169,7 +171,6 @@ pub mod message { pub runtime: String, pub migration: String, } - } use message::*; @@ -199,7 +200,7 @@ impl Handler for TaskActor { runtime, message, } = msg; - let scope = format!("({})", self.action.path.display()); + let scope = format!("({})", self.action.task_ref.path.display()); self.console.error(format!("{scope} {message}")); self.console.warning(format!( @@ -227,26 +228,27 @@ impl Handler for TaskActor { tg_name: typegraph, runtime_name: runtime, }) - + ).await; if let Err(err) = res { 
console.error(format!("failed to read user input: {err}", err = err)); } - - }; + }; ctx.spawn(fut.in_current_span().into_actor(self)); -}} + } +} impl Handler for TaskActor { type Result = (); fn handle(&mut self, msg: ResetDatabase, _: &mut Self::Context) { - self.followup_task.migrations.push(( PrismaRuntimeId { - typegraph: msg.typegraph.clone(), - name: msg.runtime.clone(), - }, - MigrationActionOverride::ResetDatabase, + self.followup_task.migrations.push(( + PrismaRuntimeId { + typegraph: msg.typegraph.clone(), + name: msg.runtime.clone(), + }, + MigrationActionOverride::ResetDatabase, )); // /// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph @@ -284,7 +286,7 @@ impl Handler for TaskActor { message, } = msg; - let scope = format!("({})", self.action.path.display()); + let scope = format!("({})", self.action.task_ref.path.display()); let scope = scope.yellow(); self.console.error(format!("{scope} {message}")); @@ -292,42 +294,45 @@ impl Handler for TaskActor { if is_new_column { self.console.info(format!("{scope} manually edit the migration {migration} or remove the migration and set a default value")); - - let remove_latest = options::RemoveLatestMigration { - task: ctx.address(), - typegraph: typegraph.clone(), - runtime: runtime.clone(), - migration: migration.clone(), - }; - - let manual = options::ManualResolution { - task: ctx.address(), - typegraph: typegraph.clone(), - runtime: runtime.clone(), - migration: migration.clone(), - message: Some(format!( - "Set a default value for the column `{}` in the table `{}`", - column, table - )), - }; - - let reset = options::ForceReset { - task: ctx.address(), - typegraph: typegraph.clone(), - runtime: runtime.clone(), - }; - - let fut = async move { - let res = Select::new(self.console.clone(), "Choose one of the following options".to_string()).interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)]).await; - if let Err(err) = res { - self.console.error(format!("failed to read user input: {err}", err = err)); - } - - }; - + let remove_latest = options::RemoveLatestMigration { + task: ctx.address(), + typegraph: typegraph.clone(), + runtime: runtime.clone(), + migration: migration.clone(), + }; + + let manual = options::ManualResolution { + task: ctx.address(), + typegraph: typegraph.clone(), + runtime: runtime.clone(), + migration: migration.clone(), + message: Some(format!( + "Set a default value for the column `{}` in the table `{}`", + column, table + )), + }; + + let reset = options::ForceReset { + task: ctx.address(), + typegraph: typegraph.clone(), + runtime: runtime.clone(), + }; + + let fut = async move { + let res = Select::new( + self.console.clone(), + "Choose one of the following options".to_string(), + ) + .interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)]) + .await; + if let Err(err) = res { + self.console + .error(format!("failed to read user input: {err}", err = err)); + } + }; + } } } -} impl Handler for TaskActor { type Result = (); @@ -339,12 +344,16 @@ impl Handler for TaskActor { migration, } = msg; - let migration_path = self.config.prisma_migration_dir_abs(&typegraph).join(&runtime).join(&migration); + let migration_path = self + .config + .prisma_migration_dir_abs(&typegraph) + .join(&runtime) + .join(&migration); // let typegraph = typegraph.clone(); // let runtime_name = runtime.clone(); let console = self.console.clone(); - let typegraph_path = self.action.path.clone(); + let typegraph_path = self.action.task_ref.path.clone(); let addr = 
ctx.address(); let fut = async move { @@ -357,13 +366,13 @@ impl Handler for TaskActor { typegraph_path.display().to_string().bold() )); - addr.do_send(message::ResetDatabase { - typegraph, - runtime, - }); + addr.do_send(message::ResetDatabase { typegraph, runtime }); } Err(err) => { - console.error(format!("Failed to remove migration directory: {:?}", migration_path)); + console.error(format!( + "Failed to remove migration directory: {:?}", + migration_path + )); console.error(format!("{err}", err = err)); } } @@ -377,8 +386,16 @@ impl Handler for TaskActor { type Result = (); fn handle(&mut self, msg: message::WaitForManualResolution, ctx: &mut Self::Context) { - let migration_path = self.config.prisma_migration_dir_abs(&msg.typegraph).join(&msg.runtime).join(msg.migration).join("migration.sql"); - eprintln!("Edit the migration file at {:?} then press enter to continue...", migration_path); + let migration_path = self + .config + .prisma_migration_dir_abs(&msg.typegraph) + .join(&msg.runtime) + .join(msg.migration) + .join("migration.sql"); + eprintln!( + "Edit the migration file at {:?} then press enter to continue...", + migration_path + ); let console = self.console.clone(); let addr = ctx.address(); @@ -395,84 +412,80 @@ impl Handler for TaskActor { } mod options { - use crate::deploy::actors::console::input::{SelectOption, OptionLabel}; - use crate::deploy::actors::task::TaskActor; + use crate::deploy::actors::console::input::{OptionLabel, SelectOption}; use crate::deploy::actors::task::deploy::DeployAction; + use crate::deploy::actors::task::TaskActor; use crate::interlude::*; -#[derive(Debug)] -pub struct RemoveLatestMigration { - pub task: Addr>, - pub typegraph: String, - pub runtime: String, - pub migration: String, // is this necessary?? -} - - -impl SelectOption for RemoveLatestMigration { - fn on_select(&self) { - self.task.do_send(super::message::RemoveLatestMigration { - typegraph: self.typegraph.clone(), - runtime: self.runtime.clone(), - migration: self.migration.clone(), - }); + #[derive(Debug)] + pub struct RemoveLatestMigration { + pub task: Addr>, + pub typegraph: String, + pub runtime: String, + pub migration: String, // is this necessary?? 
} - fn label(&self) -> OptionLabel<'_> { - OptionLabel::new("Remove the latest migration.") - } -} - -#[derive(Debug)] -pub struct ManualResolution { - pub task: Addr>, - pub typegraph: String, - pub runtime: String, - pub migration: String, - pub message: Option, -} + impl SelectOption for RemoveLatestMigration { + fn on_select(&self) { + self.task.do_send(super::message::RemoveLatestMigration { + typegraph: self.typegraph.clone(), + runtime: self.runtime.clone(), + migration: self.migration.clone(), + }); + } -impl SelectOption for ManualResolution { - fn on_select(&self) { - self.task.do_send(super::message::WaitForManualResolution { - typegraph: self.typegraph.clone(), - runtime: self.runtime.clone(), - migration: self.migration.clone(), - }); + fn label(&self) -> OptionLabel<'_> { + OptionLabel::new("Remove the latest migration.") + } } - fn label(&self) -> OptionLabel<'_> { - let label = OptionLabel::new("Manually resolve the migration."); - if let Some(message) = &self.message { - label.with_secondary(format!("Edit the migration file: {}.", message)) - } else { - label - } + #[derive(Debug)] + pub struct ManualResolution { + pub task: Addr>, + pub typegraph: String, + pub runtime: String, + pub migration: String, + pub message: Option, } -} -#[derive(Debug)] -pub struct ForceReset { - pub task: Addr>, - pub typegraph: String, - pub runtime: String, -} + impl SelectOption for ManualResolution { + fn on_select(&self) { + self.task.do_send(super::message::WaitForManualResolution { + typegraph: self.typegraph.clone(), + runtime: self.runtime.clone(), + migration: self.migration.clone(), + }); + } -impl SelectOption for ForceReset { - fn on_select(&self) { - self.task.do_send(super::message::ResetDatabase { - typegraph: self.typegraph.clone(), - runtime: self.runtime.clone(), - }); + fn label(&self) -> OptionLabel<'_> { + let label = OptionLabel::new("Manually resolve the migration."); + if let Some(message) = &self.message { + label.with_secondary(format!("Edit the migration file: {}.", message)) + } else { + label + } + } } - fn label(&self) -> OptionLabel<'_> { - OptionLabel::new("Force reset the development database.").with_secondary( - "Warning: The failed migration will potentially fail again in deployment.", - ) + #[derive(Debug)] + pub struct ForceReset { + pub task: Addr>, + pub typegraph: String, + pub runtime: String, } -} + impl SelectOption for ForceReset { + fn on_select(&self) { + self.task.do_send(super::message::ResetDatabase { + typegraph: self.typegraph.clone(), + runtime: self.runtime.clone(), + }); + } + fn label(&self) -> OptionLabel<'_> { + OptionLabel::new("Force reset the development database.").with_secondary( + "Warning: The failed migration will potentially fail again in deployment.", + ) + } + } } - diff --git a/meta-cli/src/deploy/actors/task/serialize.rs b/meta-cli/src/deploy/actors/task/serialize.rs index a249e3823f..8824bc8b52 100644 --- a/meta-cli/src/deploy/actors/task/serialize.rs +++ b/meta-cli/src/deploy/actors/task/serialize.rs @@ -1,3 +1,6 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+// SPDX-License-Identifier: MPL-2.0 + use super::action::{ ActionFinalizeContext, ActionResult, FollowupTaskConfig, OutputData, TaskAction, TaskActionGenerator, @@ -6,22 +9,20 @@ use super::command::CommandBuilder; use super::TaskConfig; use crate::com::store::MigrationAction; use crate::deploy::actors::console::Console; -use crate::deploy::actors::task_manager::TaskManager; +use crate::deploy::actors::task_manager::{TaskManager, TaskRef}; use crate::interlude::*; use color_eyre::owo_colors::OwoColorize; use common::typegraph::Typegraph; use serde::Deserialize; -use std::{path::Path, sync::Arc}; -use tokio::{process::Command, sync::OwnedSemaphorePermit}; +use std::sync::Arc; +use tokio::process::Command; pub type SerializeAction = Arc; #[derive(Debug)] pub struct SerializeActionInner { - path: Arc, + task_ref: TaskRef, task_config: Arc, - #[allow(unused)] - permit: OwnedSemaphorePermit, } #[derive(Clone)] @@ -40,16 +41,10 @@ impl SerializeActionGenerator { impl TaskActionGenerator for SerializeActionGenerator { type Action = SerializeAction; - fn generate( - &self, - path: Arc, - followup: Option<()>, - permit: OwnedSemaphorePermit, - ) -> Self::Action { + fn generate(&self, task_ref: TaskRef, followup: Option<()>) -> Self::Action { SerializeActionInner { - path, + task_ref, task_config: self.task_config.clone(), - permit, } .into() } @@ -85,7 +80,11 @@ impl TaskAction for SerializeAction { async fn get_command(&self) -> Result { CommandBuilder { - path: self.task_config.base_dir.to_path_buf().join(&self.path), + path: self + .task_config + .base_dir + .to_path_buf() + .join(&self.task_ref.path), task_config: self.task_config.clone(), action_env: "serialize", } @@ -93,23 +92,18 @@ impl TaskAction for SerializeAction { .await } - fn get_path(&self) -> &Path { - return &self.path; - } - - fn get_path_owned(&self) -> Arc { - return self.path.clone(); - } - fn get_start_message(&self) -> String { - format!("starting serialization process for {:?}", self.path) + format!( + "starting serialization process for {:?}", + self.task_ref.path.display().yellow() + ) } fn get_error_message(&self, err: &str) -> String { format!( "{icon} failed to serialize typegraph(s) from {path:?}: {err}", icon = "✗".red(), - path = self.path, + path = self.task_ref.path.display().yellow(), err = err, ) } @@ -118,18 +112,18 @@ impl TaskAction for SerializeAction { match res { Ok(data) => { ctx.console.info(format!( - "{icon} successfully serialized typegraph {name} from {path:?}", + "{icon} successfully serialized typegraph {name} from {path}", icon = "✓".green(), name = data.get_typegraph_name().cyan(), - path = self.path, + path = self.task_ref.path.display().yellow(), )); } Err(output) => { ctx.console.error(format!( - "{icon} failed to serialize typegraph {name} from {path:?}: {err}", + "{icon} failed to serialize typegraph {name} from {path}: {err}", icon = "✗".red(), name = output.get_typegraph_name().cyan(), - path = self.path, + path = self.task_ref.path.display().yellow(), err = output.error, )); } @@ -151,4 +145,8 @@ impl TaskAction for SerializeAction { "migrationsDir": ".", // TODO }) } + + fn get_task_ref(&self) -> &crate::deploy::actors::task_manager::TaskRef { + &self.task_ref + } } diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index ffbc78a81c..aa6cba18e2 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -1,21 +1,25 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License 
Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use std::collections::HashSet; +use std::{ + collections::{HashSet, VecDeque}, + sync::atomic::{AtomicUsize, Ordering}, +}; use futures::channel::oneshot; use indexmap::IndexMap; -use tokio::sync::{OwnedSemaphorePermit, Semaphore}; use crate::{config::Config, interlude::*}; use super::{ console::{Console, ConsoleActor}, + discovery::DiscoveryActor, task::{ self, action::{TaskAction, TaskActionGenerator}, TaskActor, TaskFinishStatus, }, + watcher::WatcherActor, }; pub mod report; @@ -27,29 +31,19 @@ pub mod message { #[derive(Message)] #[rtype(result = "()")] pub struct AddTask { - pub path: Arc, + pub task_ref: TaskRef, pub reason: TaskReason, } #[derive(Message)] #[rtype(result = "()")] - pub(super) struct StartTask { - pub path: Arc, - pub permit: OwnedSemaphorePermit, - } + pub(super) struct NextTask; #[derive(Message)] #[rtype(result = "()")] - pub enum UpdateTaskStatus { - Started { - path: Arc, - addr: Addr>, - }, - Finished { - path: Arc, - // for report - status: TaskFinishStatus, - }, + pub struct TaskFinished { + pub task_ref: TaskRef, + pub status: TaskFinishStatus, } /// manual stop (by CTRL-C handler) @@ -87,43 +81,187 @@ pub enum StopReason { Error, } +#[derive(Clone, Debug)] +pub struct TaskGenerator { + next_task_id: Arc, +} + +impl TaskGenerator { + pub fn generate(&self, path: Arc, retry_no: usize) -> TaskRef { + TaskRef { + path, + id: TaskId(self.next_task_id.fetch_add(1, Ordering::Relaxed)), + retry_no, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +struct TaskId(usize); + +#[derive(Clone, Debug)] +pub struct TaskRef { + pub path: Arc, + pub id: TaskId, + pub retry_no: usize, +} + +enum RetryStatus { + Pending, + Cancelled, +} + +pub enum TaskSource { + Static(Vec), + Discovery(Arc), + DiscoveryAndWatch(Arc), +} + pub struct TaskManager { - config: Arc, - action_generator: A::Generator, + init_params: TaskManagerInit, + task_generator: TaskGenerator, active_tasks: HashMap, Addr>>, - pending_tasks: HashSet>, - permits: Arc, + task_queue: VecDeque, + pending_retries: HashMap, TaskId>, report_tx: Option>>, stop_reason: Option, reports: IndexMap, TaskFinishStatus>, + watcher_addr: Option>>, console: Addr, } -impl TaskManager { +pub struct TaskManagerInit { + config: Arc, + action_generator: A::Generator, + max_parallel_tasks: usize, + max_retry_count: usize, + console: Addr, + task_source: TaskSource, +} + +impl TaskManagerInit { pub fn new( config: Arc, action_generator: A::Generator, - max_parallel_tasks: usize, - report_tx: oneshot::Sender>, console: Addr, + task_source: TaskSource, ) -> Self { Self { config, action_generator, - active_tasks: Default::default(), - pending_tasks: Default::default(), - permits: Semaphore::new(max_parallel_tasks).into(), - report_tx: Some(report_tx), - stop_reason: None, - reports: Default::default(), + max_parallel_tasks: num_cpus::get(), + max_retry_count: 0, console, + task_source, } } - pub fn auto_stop(mut self) -> Self { - self.stop_reason = Some(StopReason::Natural); + pub fn max_parallel_tasks(mut self, max_parallel_tasks: usize) -> Self { + self.max_parallel_tasks = max_parallel_tasks; self } + + pub fn max_retry_count(mut self, max_retry_count: usize) -> Self { + self.max_retry_count = max_retry_count; + self + } + + pub async fn run(self) -> Report { + let (report_tx, report_rx) = oneshot::channel(); + + TaskManager::::create(move |ctx| { + let addr = ctx.address(); + + { + let addr = addr.downgrade(); + ctrlc::set_handler(move || { + debug!("CTRL-C handler"); + if 
let Some(addr) = addr.upgrade() { + addr.do_send(Stop); + } else { + std::process::exit(1); + } + }) + .unwrap_or_log(); + } + + let task_generator = TaskGenerator { + next_task_id: Arc::new(AtomicUsize::new(1)), + }; + + let watcher_addr = self.start_source(addr, task_generator.clone()); + + let console = self.console.clone(); + let task_manager = TaskManager:: { + init_params: self, + task_generator, + active_tasks: Default::default(), + task_queue: Default::default(), + pending_retries: Default::default(), + report_tx: Some(report_tx), + stop_reason: None, + reports: IndexMap::new(), + watcher_addr, + console, + }; + + task_manager + }); + + report_rx.await.expect("task manager has been dropped") + } + + fn start_source( + &self, + addr: Addr>, + task_generator: TaskGenerator, + ) -> Option>> { + match &self.task_source { + TaskSource::Static(paths) => { + for path in paths { + addr.do_send(AddTask { + task_ref: task_generator.generate(path.clone().into(), 0), + reason: TaskReason::User, + }); + } + None + } + TaskSource::Discovery(path) => { + DiscoveryActor::new( + self.config.clone(), + task_generator.clone(), + addr.clone(), + self.console.clone(), + path.clone(), + ) + .start(); + None + } + TaskSource::DiscoveryAndWatch(path) => { + let path: Arc = path.clone(); + DiscoveryActor::new( + self.config.clone(), + task_generator.clone(), + addr.clone(), + self.console.clone(), + path.clone(), + ) + .start(); + + let watcher = WatcherActor::new( + self.config.clone(), + path, + task_generator.clone(), + addr.clone(), + self.console.clone(), + ) + .unwrap_or_log() + .start(); + + Some(watcher) + } + } + } } #[derive(Debug)] @@ -144,6 +282,21 @@ impl Actor for TaskManager { self.console.debug("started task manager".to_string()); } + fn stopping(&mut self, ctx: &mut Self::Context) -> Running { + match &self.stop_reason { + Some(reason) => { + if matches!(reason, StopReason::Restart) { + self.init_params + .start_source(ctx.address(), self.task_generator.clone()); + Running::Continue + } else { + Running::Stop + } + } + None => Running::Continue, + } + } + fn stopped(&mut self, _ctx: &mut Self::Context) { trace!("TaskManager stopped"); // send report @@ -168,100 +321,112 @@ impl Handler for TaskManager { type Result = (); fn handle(&mut self, msg: AddTask, ctx: &mut Context) -> Self::Result { - match &msg.reason { - TaskReason::User => {} - TaskReason::Discovery => { - self.console.info(format!( - "discovered typegraph definition module {:?}", - msg.path - )); - } - TaskReason::FileChanged => { + let pending_retry_id = self.pending_retries.remove(&msg.task_ref.path); + + if msg.task_ref.retry_no > 0 { + if let Some(retry_task_id) = pending_retry_id { + if retry_task_id != msg.task_ref.id { + self.console.warning( + "invalid state: different task id for retry; cancelling".to_string(), + ); + return; + } + // ok + } else { self.console - .info(format!("file changed {:?}, reloading", msg.path)); - } - TaskReason::DependencyChanged(dep) => { - self.console.info(format!( - "dependency changed {:?}, reloading {:?}", - dep, msg.path - )); - } - TaskReason::Retry(_) => { - // TODO retry no? 
- self.console.info(format!("retrying {:?}", msg.path)); + .warning("invalid state: unregistered retry; cancelling".to_string()); + return; } + } else { + // retry cancelled } - self.pending_tasks.insert(msg.path.clone()); - - let path = msg.path.clone(); - let permits = self.permits.clone(); - let addr = ctx.address(); - - let fut = async move { - let permit = permits.acquire_owned().await.unwrap_or_log(); - addr.do_send(StartTask { path, permit }); - }; - - ctx.spawn(fut.in_current_span().into_actor(self)); + self.task_queue.push_back(msg.task_ref); + ctx.address().do_send(message::NextTask); } } -impl Handler for TaskManager { +impl Handler for TaskManager { type Result = (); - fn handle(&mut self, message: StartTask, ctx: &mut Context) -> Self::Result { + fn handle(&mut self, _msg: NextTask, ctx: &mut Context) -> Self::Result { + if self.active_tasks.len() >= self.init_params.max_parallel_tasks { + // too busy + return; + } + + let Some(task) = self.task_queue.pop_front() else { + // nothing to do + return; + }; + if let Some(stop_reason) = &self.stop_reason { match stop_reason { StopReason::Natural => {} _ => { self.console - .warning(format!("task cancelled for {:?}", message.path)); + .warning(format!("task cancelled for {:?}", task.path)); return; } } } - let action = self.action_generator.generate( - message.path.clone(), + + let action = self.init_params.action_generator.generate( + task.clone(), Default::default(), // TODO - message.permit, ); - let path = action.get_path_owned(); let task_addr = TaskActor::new( - self.config.clone(), + self.init_params.config.clone(), action, ctx.address(), self.console.clone(), ) .start(); - self.pending_tasks.remove(&path); - self.active_tasks.insert(path.clone(), task_addr); + + self.active_tasks.insert(task.path, task_addr); } } -impl Handler> for TaskManager { +impl Handler> for TaskManager { type Result = (); - fn handle(&mut self, message: UpdateTaskStatus, ctx: &mut Context) -> Self::Result { - match message { - UpdateTaskStatus::Started { .. } => { - // TODO remove - unused + fn handle(&mut self, message: TaskFinished, ctx: &mut Context) -> Self::Result { + self.console.debug("task finished".to_string()); + self.active_tasks.remove(&message.task_ref.path); + ctx.address().do_send(NextTask); + + let mut next_retry_no = None; + match &message.status { + TaskFinishStatus::Error => { + if message.task_ref.retry_no < self.init_params.max_retry_count { + next_retry_no = Some(message.task_ref.retry_no + 1); + } + } + TaskFinishStatus::Finished(results) => { + // TODO partial retry - if multiple typegraphs in a single file + if results.iter().any(|r| matches!(r, Err(_))) { + next_retry_no = Some(message.task_ref.retry_no + 1); + } } - UpdateTaskStatus::Finished { - path: typegraph_path, - status, - } => { - self.active_tasks.remove(&typegraph_path); - self.reports.insert(typegraph_path.clone(), status); - if self.active_tasks.is_empty() { - match self.stop_reason { - Some(StopReason::Natural | StopReason::Manual) => { - self.console.debug("all tasks finished".to_string()); - - ctx.stop(); - } - _ => {} + _ => {} + }; + + self.reports + .insert(message.task_ref.path.clone(), message.status); + + // TODO check queue?? 
+ if self.active_tasks.is_empty() { + if self.watcher_addr.is_none() && self.pending_retries.is_empty() { + // no watcher, auto stop when all tasks finished + self.console.debug("all tasks finished".to_string()); + self.stop_reason = Some(StopReason::Natural); + ctx.stop(); + } else { + match self.stop_reason { + Some(StopReason::Manual) => { + ctx.stop(); } + _ => {} } } } @@ -272,6 +437,9 @@ impl Handler for TaskManager { type Result = (); fn handle(&mut self, _msg: Stop, ctx: &mut Context) -> Self::Result { + if let Some(watcher) = &self.watcher_addr { + watcher.do_send(super::watcher::message::Stop); + } match self.stop_reason.clone() { Some(reason) => match reason { StopReason::Natural | StopReason::Restart => { diff --git a/meta-cli/src/deploy/actors/watcher.rs b/meta-cli/src/deploy/actors/watcher.rs index ed1db67b07..1fab8de179 100644 --- a/meta-cli/src/deploy/actors/watcher.rs +++ b/meta-cli/src/deploy/actors/watcher.rs @@ -2,8 +2,8 @@ // SPDX-License-Identifier: MPL-2.0 use super::console::Console; -use super::task::deploy::DeployAction; -use super::task_manager::{self, TaskManager, TaskReason}; +use super::task::action::TaskAction; +use super::task_manager::{self, TaskGenerator, TaskManager, TaskReason}; use crate::config::Config; use crate::deploy::actors::console::ConsoleActor; use crate::deploy::push::pusher::RetryManager; @@ -55,18 +55,19 @@ pub enum Event { ConfigChanged, } -pub struct WatcherActor { +pub struct WatcherActor { // TODO config path only config: Arc, directory: Arc, - task_manager: Addr>, + task_generator: TaskGenerator, + task_manager: Addr>, console: Addr, debouncer: Option>, dependency_graph: DependencyGraph, file_filter: FileFilter, } -impl Actor for WatcherActor { +impl Actor for WatcherActor { type Context = Context; fn started(&mut self, ctx: &mut Self::Context) { @@ -84,17 +85,19 @@ impl Actor for WatcherActor { } } -impl WatcherActor { +impl WatcherActor { pub fn new( config: Arc, directory: Arc, - task_manager: Addr>, + task_generator: TaskGenerator, + task_manager: Addr>, console: Addr, ) -> Result { let file_filter = FileFilter::new(&config)?; Ok(Self { config, directory, + task_generator, task_manager, console, debouncer: None, @@ -103,7 +106,10 @@ impl WatcherActor { }) } - fn start_watcher(&mut self, ctx: &mut ::Context) -> Result<()> { + fn start_watcher( + &mut self, + ctx: &mut as actix::Actor>::Context, + ) -> Result<()> { let self_addr = ctx.address(); let mut debouncer = new_debouncer(Duration::from_secs(1), move |res: DebounceEventResult| { @@ -131,7 +137,7 @@ impl WatcherActor { } } -impl Handler for WatcherActor { +impl Handler for WatcherActor { type Result = (); fn handle(&mut self, _msg: Stop, ctx: &mut Self::Context) -> Self::Result { @@ -139,7 +145,7 @@ impl Handler for WatcherActor { } } -impl Handler for WatcherActor { +impl Handler for WatcherActor { type Result = (); fn handle(&mut self, msg: File, ctx: &mut Self::Context) -> Self::Result { @@ -165,7 +171,7 @@ impl Handler for WatcherActor { RetryManager::clear_counter(&path); self.task_manager.do_send(task_manager::message::AddTask { - path: path.into(), + task_ref: self.task_generator.generate(path.into(), 0), reason: TaskReason::DependencyChanged(dependency_path), }); } @@ -180,7 +186,7 @@ impl Handler for WatcherActor { RetryManager::clear_counter(&path); self.task_manager.do_send(task_manager::message::AddTask { - path: path.into(), + task_ref: self.task_generator.generate(path.into(), 0), reason: TaskReason::FileChanged, }); } @@ -196,7 +202,7 @@ impl Handler for 
WatcherActor { } } -impl Handler for WatcherActor { +impl Handler for WatcherActor { type Result = (); fn handle(&mut self, msg: UpdateDependencies, _ctx: &mut Self::Context) -> Self::Result { @@ -204,7 +210,7 @@ impl Handler for WatcherActor { } } -impl Handler for WatcherActor { +impl Handler for WatcherActor { type Result = (); fn handle(&mut self, msg: RemoveTypegraph, _ctx: &mut Self::Context) -> Self::Result { diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index 08903b5f3b..64a4891f05 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -97,207 +97,6 @@ pub struct PushResult { sdk_response: SDKResponse, } -// impl PushResult { -// pub fn new( -// console: Addr, -// loader: Addr, -// sdk_response: SDKResponse, -// ) -> Result { -// let raw = sdk_response -// .as_push_result() -// .wrap_err("SDK error pushing to typegate")?; -// -// let failure = match raw.failure { -// Some(failure) => Some(serde_json::from_str(&failure)?), -// None => None, -// }; -// -// Ok(Self { -// name: raw.name, -// messages: raw.messages, -// migrations: raw.migrations, -// failure, -// original_name: sdk_response.typegraph_name.clone(), -// console, -// loader, -// sdk_response, -// }) -// } -// -// #[tracing::instrument] -// pub async fn finalize(&self) -> Result<()> { -// let name = self.name.clone(); -// let print_failure = || { -// self.console.error(format!( -// "{} Error encountered while pushing {name}.", -// "✕".red(), -// name = name.cyan() -// )); -// }; -// -// let print_success = || { -// self.console.info(format!( -// "{} Successfully pushed typegraph {name}.", -// "✓".green(), -// name = name.cyan() -// )); -// }; -// -// // tg workdir + prisma_migration_rel -// let migdir = ServerStore::get_config() -// .unwrap() -// .prisma_migration_dir_abs(&self.original_name); -// -// for migrations in self.migrations.iter() { -// let dest = migdir.join(&migrations.runtime); -// if let Err(err) = common::archive::unpack(&dest, Some(migrations.migrations.clone())) { -// self.console.error(format!( -// "error while unpacking migrations into {:?}", -// migdir -// )); -// self.console.error(format!("{err:?}")); -// } else { -// self.console.info(format!( -// "Successfully unpacked migrations for {name}/{} at {:?}", -// migrations.runtime, dest -// )); -// } -// } -// -// if let Some(failure) = self.failure.clone() { -// print_failure(); -// match failure { -// PushFailure::Unknown(fail) => { -// self.console.error(format!( -// "Unknown error while pushing typegraph {tg_name}\n{msg}", -// tg_name = name.cyan(), -// msg = fail.message -// )); -// } -// PushFailure::DatabaseResetRequired(failure) => { -// handle_database_reset( -// self.console.clone(), -// self.loader.clone(), -// failure, -// self.sdk_response.clone(), -// ) -// .await? -// } -// PushFailure::NullConstraintViolation(failure) => { -// handle_null_constraint_violation( -// self.console.clone(), -// self.loader.clone(), -// failure, -// self.sdk_response.clone(), -// migdir.clone(), -// ) -// .await? 
-// } -// } -// } else { -// print_success(); -// } -// Ok(()) -// } -// } -// -// // DatabaseReset Handler + interactivity -// -// #[tracing::instrument] -// async fn handle_database_reset( -// console: Addr, -// loader: Addr, -// failure: DatabaseResetRequired, -// sdk_response: SDKResponse, -// ) -> Result<()> { -// let DatabaseResetRequired { -// message, -// runtime_name, -// } = failure; -// -// let name = sdk_response.typegraph_name.clone(); -// -// console.error(message); -// console.warning(format!( -// "Database reset required for prisma runtime {rt} in typegraph {name}", -// rt = runtime_name.magenta(), -// )); -// -// let rt = runtime_name.clone(); -// let _ = Confirm::new( -// console, -// format!("Do you want to reset the database for runtime {rt} on {name}?"), -// ) -// .interact(Box::new(ConfirmDatabaseResetRequired { -// typegraph_path: sdk_response.typegraph_path, -// runtime_name, -// loader, -// })) -// .await?; -// -// Ok(()) -// } -// -// // NullConstraintViolation Handler + interactivity -// -// #[tracing::instrument] -// pub async fn handle_null_constraint_violation( -// console: Addr, -// loader: Addr, -// failure: NullConstraintViolation, -// sdk_response: SDKResponse, -// migration_dir: PathBuf, -// ) -> Result<()> { -// let NullConstraintViolation { -// message, -// runtime_name, -// migration_name, -// is_new_column, -// column, -// table, -// } = failure; -// -// console.error(message); -// -// if is_new_column { -// console.info(format!("manually edit the migration {migration_name}; or remove the migration and add set a default value")); -// -// let remove_latest = RemoveLatestMigration { -// loader: loader.clone(), -// typegraph_path: sdk_response.typegraph_path.clone(), -// migration_dir: migration_dir.clone(), -// runtime_name: runtime_name.clone(), -// migration_name: migration_name.clone(), -// console: console.clone(), -// }; -// -// let manual = ManualResolution { -// loader: loader.clone(), -// typegraph_path: sdk_response.typegraph_path.clone(), -// migration_dir: migration_dir.clone(), -// runtime_name: runtime_name.clone(), -// migration_name: migration_name.clone(), -// message: Some(format!( -// "Set a default value for the column `{}` in the table `{}`", -// column, table -// )), -// console: console.clone(), -// }; -// -// let reset = ForceReset { -// loader: loader.clone(), -// runtime_name: runtime_name.clone(), -// typegraph_path: sdk_response.typegraph_path.clone(), -// }; -// -// let _ = Select::new(console, "Choose one of the following options".to_string()) -// .interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)]) -// .await?; -// } -// -// Ok(()) -// } - lazy_static! 
{ static ref RETRY_COUNTERS: Mutex>> = Mutex::new(HashMap::new()); } diff --git a/meta-cli/src/typegraph/loader/discovery.rs b/meta-cli/src/typegraph/loader/discovery.rs index ff1d0b76f4..ec57f60864 100644 --- a/meta-cli/src/typegraph/loader/discovery.rs +++ b/meta-cli/src/typegraph/loader/discovery.rs @@ -46,6 +46,8 @@ impl Discovery { let path = entry.path(); if !self.filter.is_excluded(path, &mut searcher) { handler(Ok(path.to_path_buf())); + } else { + trace!("excluded from discovery {path:?}"); } } Err(err) => { diff --git a/typegate/src/typegate/mod.ts b/typegate/src/typegate/mod.ts index 35ff3b6acf..18f89af5d5 100644 --- a/typegate/src/typegate/mod.ts +++ b/typegate/src/typegate/mod.ts @@ -32,8 +32,9 @@ import { resolveIdentifier } from "../services/middlewares.ts"; import { handleGraphQL } from "../services/graphql_service.ts"; import { getLogger } from "../log.ts"; import { MigrationFailure } from "../runtimes/prisma/hooks/run_migrations.ts"; -import introspectionJson from "../typegraphs/introspection.json" with { type: - "json" }; +import introspectionJson from "../typegraphs/introspection.json" with { + type: "json", +}; import { ArtifactService } from "../services/artifact_service.ts"; import { ArtifactStore } from "./artifacts/mod.ts"; import { SyncConfig } from "../sync/config.ts"; @@ -382,14 +383,14 @@ export class Typegate implements AsyncDisposable { const introspection = enableIntrospection ? await TypeGraph.init( - this, - introspectionDef, - new SecretManager(introspectionDef, {}), - { - typegraph: TypeGraphRuntime.init(tgDS, [], {}), - }, - null, - ) + this, + introspectionDef, + new SecretManager(introspectionDef, {}), + { + typegraph: TypeGraphRuntime.init(tgDS, [], {}), + }, + null, + ) : null; const tg = await TypeGraph.init( diff --git a/typegraph/node/sdk/src/io.ts b/typegraph/node/sdk/src/io.ts index 2523057e29..c07facf366 100644 --- a/typegraph/node/sdk/src/io.ts +++ b/typegraph/node/sdk/src/io.ts @@ -56,83 +56,9 @@ class RpcResponseReader { // private handler: (line: string) => void; constructor() { - log.debug("creating readline interface"); process.stdin.setEncoding("utf-8"); - // this.readline = createInterface({ - // input: process.stdin, - // }); - - // const handler = (line: string) => { - // log.debug("got line", line); - // const message = JSON.parse(line); - // const resolver = this.resolvers.get(message.id); - // if (resolver) { - // log.debug("rpc response", message); - // resolver(message.result); - // this.resolvers.delete(message.id); - - // if (this.resolvers.size === 0) { - // this.readline.pause(); - // log.debug("paused"); - // } - // } - // }; - - // log.debug("adding line handler"); - // this.readline.on("line", handler); - - // log.debug("unref stdin"); - // process.stdin.unref(); - // log.debug("unreffed stdin"); } - // async open() { - // if (this.readline) { // return; - // } - // this.readline = createInterface({ - // input: process.stdin, - // }); - // log.debug("opened"); - - // for await (const line of this.readline) { - // const message = JSON.parse(line); - // const resolver = this.resolvers.get(message.id); - // if (resolver) { - // log.debug("rpc response", message); - // resolver(message.result); - // this.resolvers.delete(message.id); - // if (this.resolvers.size === 0) { - // this.readline.close(); - // log.debug("closed"); - // } - // } - // } - // } - - // TODO implement timeout - // async loop() { - // log.debug("loop: on"); - // for await (const line of this.readline) { - // log.debug("resolvers", this.resolvers.size, 
line); - // try { - // const message = JSON.parse(line); - // const resolver = this.resolvers.get(message.id); - // if (resolver) { - // log.debug("rpc response", message); - // resolver(message.result); - // this.resolvers.delete(message.id); - // if (this.resolvers.size === 0) { - // break; - // } - // } - // } catch (e) { - // // pass - // } - // } - // this.running = false; - // log.debug("loop: off"); - // } - read(id: number) { return new Promise((resolve, reject) => { const handler = () => { @@ -164,39 +90,6 @@ class RpcResponseReader { }; process.stdin.on("readable", handler); }); - - // if (!this.listening) { - // this.readline.resume(); - // log.debug("listening: on"); - // this.listening = true; - // this.readline.on("line", this.handler); - // } - // if (!this.running) { - // this.running = true; - // this.loop(); - // } - // this.open(); // no await - // this.readline.resume(); - // return new Promise((resolve) => { - // this.resolvers.set(id, resolve); - // }); - - // return new Promise((resolve, reject) => { - // this.readline.resume(); - // this.readline.once("line", (line) => { - // try { - // const message = JSON.parse(line); - // if (message.id !== id) { - // reject("required sequential read"); - // } else { - // this.readline.pause(); - // resolve(message.result); - // } - // } catch (e) { - // reject(e); - // } - // }); - // }); } } diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index c33e23fd6d..fd625f6e59 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -54,7 +54,8 @@ export class ArtifactUploader { // const uploadUrls: Array = await response.json(); if (uploadUrls.length !== artifactMetas.length) { - const diff = `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; + const diff = + `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; throw new Error(`Failed to get upload URLs for all artifacts: ${diff}`); } @@ -86,7 +87,7 @@ export class ArtifactUploader { const path = join(dirname(this.tgPath), meta.relativePath); // TODO: stream const content = await fsp.readFile(path); - log.info("uploading artifact", meta.relativePath, urlObj.href); + log.debug("uploading artifact", meta.relativePath, urlObj.href); const res = await execRequest( urlObj, { @@ -96,6 +97,8 @@ export class ArtifactUploader { } as RequestInit, `failed to upload artifact ${meta.relativePath}`, ); + log.info("✓ artifact uploaded:", meta.relativePath); + if (!res.ok) { const err = await res.json(); // To be read by the CLI? 
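[editor's note — illustrative sketch, not part of the patch] The RpcResponseReader changes above, together with the Python _RpcCall/_RpcResponseReader introduced later in this series, replace the old HTTP round-trips with a line-delimited JSON-RPC 2.0 exchange over the process's stdio: the SDK writes a single request line prefixed with "jsonrpc: " (this prefix is what the Python SDK prints; the meta-cli side is assumed to scan child stdout for it) and then reads stdin until a response line whose "id" matches the request arrives, returning its "result". The TypeScript sketch below only illustrates that exchange; rpcRequest, its buffering, and the "queryGlobalConfig" usage line are illustrative assumptions, not the SDK's actual API.

    // Illustrative only: line-delimited JSON-RPC over stdio, matching responses by id.
    process.stdin.setEncoding("utf-8");
    let nextId = 0;
    let buffered = "";

    function rpcRequest<T>(method: string, params: unknown): Promise<T> {
      const id = ++nextId;
      const request = JSON.stringify({ jsonrpc: "2.0", id, method, params });
      // Assumed convention: the CLI watches the child's stdout for "jsonrpc: " lines.
      process.stdout.write(`jsonrpc: ${request}\n`);

      return new Promise<T>((resolve, reject) => {
        const onReadable = () => {
          // Drain whatever is available and wait until a full line is buffered.
          let chunk: string | null;
          while ((chunk = process.stdin.read()) !== null) {
            buffered += chunk;
          }
          const newline = buffered.indexOf("\n");
          if (newline < 0) return;
          const line = buffered.slice(0, newline);
          buffered = buffered.slice(newline + 1);
          process.stdin.off("readable", onReadable);
          try {
            const message = JSON.parse(line);
            if (message.jsonrpc !== "2.0" || message.id !== id) {
              // Responses are expected in request order, one per line.
              reject(new Error(`unexpected rpc response for request #${id}`));
            } else {
              resolve(message.result as T);
            }
          } catch (err) {
            reject(err);
          }
        };
        process.stdin.on("readable", onReadable);
      });
    }

    // usage sketch: const config = await rpcRequest("queryGlobalConfig", null);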
diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index a69bfc3067..2492857c47 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -6,7 +6,7 @@ import { BasicAuth, tgDeploy } from "./tg_deploy.js"; import { TgFinalizationResult, TypegraphOutput } from "./typegraph.js"; import { getEnvVariable } from "./utils/func_utils.js"; import { freezeTgOutput } from "./utils/func_utils.js"; -import { log, rpc, GlobalConfig, TypegraphConfig } from "./io.js"; +import { GlobalConfig, log, rpc, TypegraphConfig } from "./io.js"; const PORT = "MCLI_SERVER_PORT"; // meta-cli instance that executes the current file const SELF_PATH = "MCLI_TG_PATH"; // path to the current file to uniquely identify the run results @@ -87,7 +87,6 @@ export class Manager { async run() { const command = Manager.getCommand(); - log.debug("Manager: command is", command); const finalizeParams = { typegraphPath: this.#typegraphPath, diff --git a/typegraph/node/sdk/src/typegraph.ts b/typegraph/node/sdk/src/typegraph.ts index 3d25a66b20..9e90b56d7b 100644 --- a/typegraph/node/sdk/src/typegraph.ts +++ b/typegraph/node/sdk/src/typegraph.ts @@ -149,12 +149,14 @@ export async function typegraph( maybeBuilder?: TypegraphBuilder, ): Promise { ++counter; - const args = - typeof nameOrArgs === "string" ? { name: nameOrArgs } : nameOrArgs; + const args = typeof nameOrArgs === "string" + ? { name: nameOrArgs } + : nameOrArgs; const { name, dynamic, cors, prefix, rate, secrets } = args; - const builder = - "builder" in args ? (args.builder as TypegraphBuilder) : maybeBuilder!; + const builder = "builder" in args + ? (args.builder as TypegraphBuilder) + : maybeBuilder!; const file = caller(); if (!file) { @@ -218,7 +220,6 @@ export async function typegraph( const ret = { serialize(config: FinalizeParams) { - log.debug("finalizeParams", config); try { const [tgJson, ref_artifacts] = core.finalizeTypegraph(config); const result: TgFinalizationResult = { @@ -239,11 +240,8 @@ export async function typegraph( } as TypegraphOutput; if (Manager.isRunFromCLI()) { - log.debug("creating Manager"); const manager = await Manager.init(ret); - log.debug("running Manager"); await manager.run(); - log.debug("done"); // TODO solve hanging process (stdin??) setTimeout(() => { diff --git a/typegraph/node/sdk/src/utils/func_utils.ts b/typegraph/node/sdk/src/utils/func_utils.ts index 1c1dacf1d8..97fdc3fe89 100644 --- a/typegraph/node/sdk/src/utils/func_utils.ts +++ b/typegraph/node/sdk/src/utils/func_utils.ts @@ -89,8 +89,8 @@ export function freezeTgOutput( config: FinalizeParams, tgOutput: TypegraphOutput, ): TypegraphOutput { - frozenMemo[tgOutput.name] = - frozenMemo[tgOutput.name] ?? tgOutput.serialize(config); + frozenMemo[tgOutput.name] = frozenMemo[tgOutput.name] ?? 
+ tgOutput.serialize(config); return { ...tgOutput, serialize: () => frozenMemo[tgOutput.name], From 0a898f2224a439d55242903a0ec292e79d21f097 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Fri, 7 Jun 2024 10:49:53 +0300 Subject: [PATCH 11/35] fix python sdk --- meta-cli/src/deploy/actors/task/deploy.rs | 1 - typegraph/node/sdk/src/io.ts | 4 - typegraph/node/sdk/src/metagen.ts | 6 +- typegraph/node/sdk/src/tg_artifact_upload.ts | 6 +- typegraph/node/sdk/src/tg_deploy.ts | 2 +- typegraph/node/sdk/src/tg_manage.ts | 81 +----- typegraph/python/typegraph/graph/metagen.py | 29 +- .../python/typegraph/graph/shared_types.py | 6 +- .../typegraph/graph/tg_artifact_upload.py | 20 +- typegraph/python/typegraph/graph/tg_deploy.py | 76 +++-- typegraph/python/typegraph/graph/tg_manage.py | 261 ++++++------------ typegraph/python/typegraph/graph/typegraph.py | 21 +- typegraph/python/typegraph/io.py | 152 ++++++++++ typegraph/python/typegraph/log.py | 30 -- typegraph/python/typegraph/utils.py | 4 +- typegraph/python/typegraph/wit.py | 4 +- 16 files changed, 347 insertions(+), 356 deletions(-) create mode 100644 typegraph/python/typegraph/io.py delete mode 100644 typegraph/python/typegraph/log.py diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index b756547e85..7135c52042 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -1,7 +1,6 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -mod migration_resolution; mod migrations; use super::action::{ diff --git a/typegraph/node/sdk/src/io.ts b/typegraph/node/sdk/src/io.ts index c07facf366..5e5aea0855 100644 --- a/typegraph/node/sdk/src/io.ts +++ b/typegraph/node/sdk/src/io.ts @@ -49,11 +49,7 @@ export const log = { }; class RpcResponseReader { - private resolvers: Map void> = new Map(); - // private readline: Interface; private buffer: string = ""; - // private listening = false; - // private handler: (line: string) => void; constructor() { process.stdin.setEncoding("utf-8"); diff --git a/typegraph/node/sdk/src/metagen.ts b/typegraph/node/sdk/src/metagen.ts index 2aeeaf763c..f3f6def0cd 100644 --- a/typegraph/node/sdk/src/metagen.ts +++ b/typegraph/node/sdk/src/metagen.ts @@ -20,9 +20,9 @@ const finalizeParams = { migrationsDir: "prisma-migrations", migrationActions: [], defaultMigrationAction: { - apply: true, - create: true, - reset: true, + apply: false, + create: false, + reset: false, }, }, } satisfies FinalizeParams; diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index fd625f6e59..58fb970e31 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -54,8 +54,7 @@ export class ArtifactUploader { // const uploadUrls: Array = await response.json(); if (uploadUrls.length !== artifactMetas.length) { - const diff = - `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; + const diff = `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; throw new Error(`Failed to get upload URLs for all artifacts: ${diff}`); } @@ -97,7 +96,6 @@ export class ArtifactUploader { } as RequestInit, `failed to upload artifact ${meta.relativePath}`, ); - log.info("✓ artifact uploaded:", meta.relativePath); if (!res.ok) { const err = await res.json(); @@ -109,7 +107,7 @@ export class ArtifactUploader { ); } const ret = res.json(); - log.info(`Successfully uploaded 
artifact`, meta.relativePath); + log.info("✓ artifact uploaded:", meta.relativePath); return ret; } diff --git a/typegraph/node/sdk/src/tg_deploy.ts b/typegraph/node/sdk/src/tg_deploy.ts index a0ecd3e50c..18540a56ce 100644 --- a/typegraph/node/sdk/src/tg_deploy.ts +++ b/typegraph/node/sdk/src/tg_deploy.ts @@ -88,7 +88,7 @@ export async function tgDeploy( if (refArtifacts.length > 0) { // upload the artifacts const artifactUploader = new ArtifactUploader( - params.typegate.url, + typegate.url, refArtifacts, typegraph.name, typegate.auth, diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index 2492857c47..c8b5f75f80 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -13,39 +13,6 @@ const SELF_PATH = "MCLI_TG_PATH"; // path to the current file to uniquely identi type Command = "serialize" | "deploy" | "codegen"; -// Types for CLI => SDK -type CLIServerResponse = { - command: Command; - config: CLIConfigRequest; -}; - -type CLIConfigRequest = { - typegate: { - endpoint: string; - auth?: { - // field not required for serialize command - username: string; - password: string; - }; - }; - prefix?: string; - secrets: Record; - artifactsConfig: FinalizeParams; - disableArtifactResolution: boolean; - codegen: boolean; -}; - -type CLISuccess = { - data: T; -}; - -// Types for SDK => CLI (typically forwarding the response from typegate) -type SDKResponse = { - command: Command; - typegraphName: string; - typegraphPath: string; -} & ({ error: T } | { data: T }); - export class Manager { #typegraph: TypegraphOutput; #typegraphPath: string; @@ -112,51 +79,17 @@ export class Manager { } } - // async #requestCommands(): Promise { - // const { data: config } = await this.#requestConfig(); - // // console.error("SDK received config", config); - // const { data: command } = - // await (await fetch(new URL("command", this.#endpoint))) - // .json() as CLISuccess; - // // console.error("SDK received command", command); - // - // return { command, config }; - // } - // - // async #requestConfig(): Promise> { - // const params = new URLSearchParams({ - // typegraph: this.#typegraph.name, - // typegraph_path: this.#typegraphPath, - // }); - // const response = await fetch(new URL("config?" + params, this.#endpoint)); - // return (await response.json()) as CLISuccess; - // } - async #serialize(config: FinalizeParams): Promise { let finalizationResult: TgFinalizationResult; try { finalizationResult = this.#typegraph.serialize(config); + log.success(finalizationResult.tgJson, true); } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, error: err?.message ?? "failed to serialize typegraph", }); - return; - // return await this.#relayErrorToCLI( - // "serialize", - // "serialization_err", - // err?.message ?? "error serializing typegraph", - // { - // err, - // }, - // ); } - - log.success(finalizationResult.tgJson, true); - // await this.#relayResultToCLI( - // "serialize", - // JSON.parse(finalizationResult.tgJson), - // ); } async #deploy(finalizeParams: FinalizeParams): Promise { @@ -180,14 +113,6 @@ export class Manager { error: err?.message ?? "failed to serialize typegraph", }); return; - // return await this.#relayErrorToCLI( - // "deploy", - // "serialization_err", - // err?.message ?? 
"error serializing typegraph", - // { - // err, - // }, - // ); } const reusableTgOutput = { ...this.#typegraph, @@ -197,10 +122,6 @@ export class Manager { if (finalizeParams.codegen) { // TODO throw new Error("not implemented"); - // await this.#relayResultToCLI( - // "codegen", - // JSON.parse(frozenSerialized.tgJson), - // ); } try { diff --git a/typegraph/python/typegraph/graph/metagen.py b/typegraph/python/typegraph/graph/metagen.py index 868c981890..5b8317eb6f 100644 --- a/typegraph/python/typegraph/graph/metagen.py +++ b/typegraph/python/typegraph/graph/metagen.py @@ -4,26 +4,31 @@ import json from typing import List, Union from typegraph.gen.exports.core import ( - ArtifactResolutionConfig, MigrationAction, - MigrationConfig, + FinalizeParams, + PrismaMigrationConfig, ) from typegraph.gen.exports.utils import MdkConfig, MdkOutput from typegraph.gen.types import Err from typegraph.graph.shared_types import TypegraphOutput from typegraph.utils import freeze_tg_output from typegraph.wit import store, wit_utils +from os import environ as env -codegen_artefact_config = ArtifactResolutionConfig( - prisma_migration=MigrationConfig( - global_action=MigrationAction(create=False, reset=False), - migration_dir=".", - runtime_actions=None, - ), - dir=None, +_tg_path = env.get("MCLI_TG_PATH") +if _tg_path is None: + raise Exception("MCLI_TG_PATH not set") + +finalize_params = FinalizeParams( + typegraph_path=_tg_path, prefix=None, - disable_artifact_resolution=True, + artifact_resolution=False, codegen=True, + prisma_migration=PrismaMigrationConfig( + migrations_dir="prisma-migrations", + migration_actions=[], + default_migration_action=MigrationAction(apply=False, create=False, reset=False), + ) ) @@ -40,9 +45,9 @@ def _get_mdk_config( tg_output: TypegraphOutput, target_name: str, ) -> MdkConfig: - frozen_out = freeze_tg_output(codegen_artefact_config, tg_output) + frozen_out = freeze_tg_output(finalize_params, tg_output) return MdkConfig( - tg_json=frozen_out.serialize(codegen_artefact_config).tgJson, + tg_json=frozen_out.serialize(finalize_params).tgJson, config_json=json.dumps(self.gen_config), workspace_path=self.workspace_path, target_name=target_name, diff --git a/typegraph/python/typegraph/graph/shared_types.py b/typegraph/python/typegraph/graph/shared_types.py index ea17e9349c..67c1c3f7dd 100644 --- a/typegraph/python/typegraph/graph/shared_types.py +++ b/typegraph/python/typegraph/graph/shared_types.py @@ -3,9 +3,9 @@ from base64 import b64encode from dataclasses import dataclass -from typing import Callable, List, Optional +from typing import Callable, List from typegraph.gen.exports.core import Artifact -from typegraph.wit import ArtifactResolutionConfig +from typegraph.wit import FinalizeParams @dataclass @@ -17,7 +17,7 @@ class FinalizationResult: @dataclass class TypegraphOutput: name: str - serialize: Callable[[Optional[ArtifactResolutionConfig]], FinalizationResult] + serialize: Callable[[FinalizeParams], FinalizationResult] @dataclass diff --git a/typegraph/python/typegraph/graph/tg_artifact_upload.py b/typegraph/python/typegraph/graph/tg_artifact_upload.py index 880aa34af1..03414130bf 100644 --- a/typegraph/python/typegraph/graph/tg_artifact_upload.py +++ b/typegraph/python/typegraph/graph/tg_artifact_upload.py @@ -12,7 +12,7 @@ from typegraph.gen.exports.core import Artifact from typegraph.gen.types import Err, Ok, Result from typegraph.graph.shared_types import BasicAuth -from typegraph import log +from typegraph.io import Log @dataclass @@ -66,6 +66,7 @@ def 
__fetch_upload_urls( try: response = request.urlopen(req) except HTTPError as e: + Log.error("error message:", e.fp.read().decode()) raise Exception(f"failed to get upload URLs: {e}") if response.status != 200: @@ -83,9 +84,10 @@ def __upload( if self.auth is not None: upload_headers["Authorization"] = self.auth.as_header_value() + Log.debug("upload headers", upload_headers) if url is None: - log.info("skipping artifact upload:", meta.relativePath) + Log.info("skipping artifact upload:", meta.relativePath) return Ok(None) if self.tg_path is None: @@ -105,7 +107,7 @@ def __upload( rebased_url = Url.urlunparse(parsed_url) - log.info("uploading artifact", meta.relativePath, rebased_url) + Log.debug("uploading artifact", meta.relativePath, rebased_url) upload_req = request.Request( url=rebased_url, method="POST", @@ -113,14 +115,18 @@ def __upload( headers=upload_headers, ) try: + Log.debug("uploading artifact", meta.relativePath, str(rebased_url)) response = request.urlopen(upload_req) except HTTPError as e: - log.debug(e) + Log.error("failed to upload artifact", meta.relativePath, e) errmsg = json.load(e.fp).get("error", None) + Log.error("error message:", errmsg) raise Exception(errmsg) if response.status != 201: raise Exception(f"failed to upload artifact {path} {response.status}") + Log.info("✓ artifact uploaded", meta.relativePath) + # TODO why?? return handle_response(response.read().decode()) @@ -144,7 +150,7 @@ def __handle_errors( for result, meta in zip(results, artifact_metas): if isinstance(result, Err): print( - f"Failed to upload artifact {meta.relativePath}: {result.value}", + f"failed to upload artifact {meta.relativePath}: {result.value}", file=sys.stderr, ) errors += 1 @@ -152,7 +158,7 @@ def __handle_errors( # print(f"Successfuly uploaded artifact {meta.relativePath}", file=sys.stderr) if errors > 0: - raise Exception(f"Failed to upload {errors} artifacts") + raise Exception(f"failed to upload {errors} artifacts") def upload_artifacts( self, @@ -160,7 +166,7 @@ def upload_artifacts( artifact_metas = self.get_metas(self.artifacts) upload_urls = self.__fetch_upload_urls(artifact_metas) - log.debug("upload urls", upload_urls) + Log.debug("upload urls", upload_urls) results = [] for i in range(len(artifact_metas)): diff --git a/typegraph/python/typegraph/graph/tg_deploy.py b/typegraph/python/typegraph/graph/tg_deploy.py index fa2868ebe8..2ea4d9b1a7 100644 --- a/typegraph/python/typegraph/graph/tg_deploy.py +++ b/typegraph/python/typegraph/graph/tg_deploy.py @@ -5,34 +5,42 @@ from dataclasses import dataclass from typing import Any, Dict, Optional, Union from urllib import request +from platform import python_version from typegraph.gen.exports.utils import QueryDeployParams from typegraph.gen.types import Err +from typegraph.gen.exports.core import MigrationAction, PrismaMigrationConfig from typegraph.graph.shared_types import BasicAuth from typegraph.graph.tg_artifact_upload import ArtifactUploader from typegraph.graph.typegraph import TypegraphOutput -from typegraph.wit import ArtifactResolutionConfig, store, wit_utils +from typegraph.wit import FinalizeParams, store, wit_utils +from typegraph import version as sdk_version +@dataclass +class TypegateConnectionOptions: + url: str + auth: Optional[BasicAuth] @dataclass class TypegraphDeployParams: - base_url: str - artifacts_config: ArtifactResolutionConfig - typegraph_path: Optional[str] - auth: Optional[BasicAuth] = None + typegate: TypegateConnectionOptions + typegraph_path: str + prefix: Optional[str] = None secrets: 
Optional[Dict[str, str]] = None + migrations_dir: Optional[str] = None + migration_actions: Optional[Dict[str, MigrationAction]] = None + default_migration_action: Optional[MigrationAction] = None @dataclass class TypegraphRemoveParams: - base_url: str - auth: Optional[BasicAuth] = None + typegate: TypegateConnectionOptions @dataclass class DeployResult: serialized: str - typegate: Union[Dict[str, Any], str] + response: Union[Dict[str, Any], str] @dataclass @@ -48,23 +56,43 @@ class UploadArtifactMeta: def tg_deploy(tg: TypegraphOutput, params: TypegraphDeployParams) -> DeployResult: - sep = "/" if not params.base_url.endswith("/") else "" - url = params.base_url + sep + "typegate" + typegate = params.typegate + + sep = "/" if not typegate.url.endswith("/") else "" + url = typegate.url + sep + "typegate" + + headers = {"Content-Type": "application/json", "User-Agent": f"Mozilla/5.0 TypegraphSdk/{sdk_version} Python/{python_version()}"} + if typegate.auth is not None: + headers["Authorization"] = typegate.auth.as_header_value() + + finalize_params = FinalizeParams( + typegraph_path=params.typegraph_path, + prefix=params.prefix, + artifact_resolution=True, + codegen=False, + prisma_migration=PrismaMigrationConfig( + migrations_dir=params.migrations_dir or "prisma-migrations", + migration_actions=[(k,v) for k, v in (params.migration_actions or {}).items()], + default_migration_action=params.default_migration_action or MigrationAction( + apply=True, + create=False, + reset=False + ), + ) + ) - headers = {"Content-Type": "application/json"} - if params.auth is not None: - headers["Authorization"] = params.auth.as_header_value() - serialized = tg.serialize(params.artifacts_config) + serialized = tg.serialize(finalize_params) tg_json = serialized.tgJson ref_artifacts = serialized.ref_artifacts + if len(ref_artifacts) > 0: # upload the referred artifacts artifact_uploader = ArtifactUploader( - params.base_url, + typegate.url, ref_artifacts, tg.name, - params.auth, + typegate.auth, headers, params.typegraph_path, ) @@ -93,17 +121,19 @@ def tg_deploy(tg: TypegraphOutput, params: TypegraphDeployParams) -> DeployResul response = response.read().decode() return DeployResult( serialized=tg_json, - typegate=handle_response(response), + response=handle_response(response).get("data").get("addTypegraph"), ) def tg_remove(tg: TypegraphOutput, params: TypegraphRemoveParams): - sep = "/" if not params.base_url.endswith("/") else "" - url = params.base_url + sep + "typegate" + typegate = params.typegate + + sep = "/" if not typegate.url.endswith("/") else "" + url = typegate.url + sep + "typegate" headers = {"Content-Type": "application/json"} - if params.auth is not None: - headers["Authorization"] = params.auth.as_header_value() + if typegate.auth is not None: + headers["Authorization"] = typegate.auth.as_header_value() res = wit_utils.gql_remove_query(store, [tg.name]) @@ -123,7 +153,7 @@ def tg_remove(tg: TypegraphOutput, params: TypegraphRemoveParams): # simple wrapper for a more descriptive error -def exec_request(req: any): +def exec_request(req: Any): try: return request.urlopen(req) except request.HTTPError as res: @@ -134,7 +164,7 @@ def exec_request(req: any): raise Exception(f"{e}: {req.full_url}") -def handle_response(res: any, url=""): +def handle_response(res: Any, url=""): try: return json.loads(res) except Exception as _: diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index 24391e57ab..15e8cb4849 100644 --- 
a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -1,232 +1,135 @@ # Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. # SPDX-License-Identifier: MPL-2.0 -import json import os import traceback -from dataclasses import dataclass from enum import Enum -from typing import Dict, Union, Any -from urllib import parse, request +from typing import Union, Optional from typegraph.gen.exports.core import ( - ArtifactResolutionConfig, - MigrationAction, - MigrationConfig, + FinalizeParams, + PrismaMigrationConfig, ) -from typegraph.graph.shared_types import BasicAuth, TypegraphOutput -from typegraph.graph.tg_deploy import TypegraphDeployParams, tg_deploy +from typegraph.graph.shared_types import TypegraphOutput +from typegraph.graph.tg_deploy import TypegateConnectionOptions, TypegraphDeployParams, tg_deploy from typegraph.utils import freeze_tg_output -from typegraph import log +from typegraph.io import Log, GlobalConfig, Rpc, TypegraphConfig PORT = "MCLI_SERVER_PORT" # meta-cli instance that executes the current file SELF_PATH = ( "MCLI_TG_PATH" # path to the current file to uniquely identify the run results ) - class Command(Enum): SERIALIZE = "serialize" DEPLOY = "deploy" - CODEGEN = "codegen" - - -# Types for CLI => SDK -@dataclass -class Typegate: - endpoint: str - auth: Union[None, BasicAuth] = None - -@dataclass -class CLIConfigRequest: - typegate: Typegate - secrets: Dict[str, str] - artifacts_config: ArtifactResolutionConfig - prefix: Union[None, str] = None +_env_command = os.environ.get("MCLI_ACTION") +command = None +if _env_command is not None: + if _env_command not in [Command.SERIALIZE.value, Command.DEPLOY.value]: + raise Exception(f"MCLI_ACTION env variable must be one of {Command.SERIALIZE.value}, {Command.DEPLOY.value}") + command = Command(_env_command) -@dataclass -class CLIServerResponse: - command: Command - config: CLIConfigRequest - +_global_config: Optional[GlobalConfig] = None +def get_global_config(): + global _global_config + if _global_config is None: + _global_config = Rpc.get_global_config() + return _global_config class Manager: - port: int typegraph: TypegraphOutput - endpoint: str typegraph_path: str + typegraph_config: TypegraphConfig + global_config: GlobalConfig def is_run_from_cli() -> bool: - return True if os.environ.get(PORT) else False + return os.environ.get("MCLI_ACTION") is not None def __init__(self, typegraph: TypegraphOutput, port: Union[None, int] = None): self.typegraph = typegraph - self.typegraph_path = os.environ.get(SELF_PATH) - if port is None: - self.port = int(os.environ.get(PORT)) - else: - self.port = port - self.endpoint = f"http://localhost:{self.port}" + tg_path = os.environ.get(SELF_PATH) + if tg_path is None: + raise Exception(f"{SELF_PATH} env variable not set") + self.typegraph_path = tg_path + self.global_config = get_global_config() + self.typegraph_config = Rpc.get_typegraph_config(typegraph.name) def run(self): - sdk = self.request_command() - if sdk.command == Command.SERIALIZE: - self.serialize(sdk.config) - elif sdk.command == Command.DEPLOY: - self.deploy(sdk.config) + params = FinalizeParams( + typegraph_path=self.typegraph_path, + prefix=self.global_config.prefix, + artifact_resolution=True, + codegen=False, + prisma_migration=PrismaMigrationConfig( + migrations_dir=self.typegraph_config.migrations_dir, + migration_actions=[(k, v) for k, v in self.typegraph_config.migration_actions.items()], + 
default_migration_action=self.typegraph_config.default_migration_action + ) + ) + + if command is None: + raise Exception("MCLI_ACTION env variable required") + elif command == Command.SERIALIZE: + self.serialize(params) + elif command == Command.DEPLOY: + self.deploy(params) else: - raise Exception(f"command {sdk.command.value} not supported") + raise Exception(f"command {command.value} not supported") - def serialize(self, config: CLIConfigRequest): + def serialize(self, config: FinalizeParams): try: - artifact_cfg = config.artifacts_config - artifact_cfg.prefix = ( - config.prefix - ) # prefix from cli overrides the current value - res = self.typegraph.serialize(artifact_cfg) + res = self.typegraph.serialize(config) + Log.success(res.tgJson, noencode=True) except Exception as err: - log.debug(traceback.format_exc()) - log.failure({"typegraph": self.typegraph.name, "error": str(err)}) - return - - return self.relay_data_to_cli(Command.SERIALIZE, data=json.loads(res.tgJson)) + Log.debug(traceback.format_exc()) + Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) - def deploy(self, config: CLIConfigRequest): - typegate = config.typegate + def deploy(self, config: FinalizeParams): + typegate = self.global_config.typegate + if typegate is None: + raise Exception("unexpected") if typegate.auth is None: raise Exception( f'{self.typegraph.name}" received null or undefined "auth" field on the configuration' ) - artifacts_config = config.artifacts_config - artifacts_config.prefix = config.prefix # priority - params = TypegraphDeployParams( - base_url=typegate.endpoint, - auth=typegate.auth, - artifacts_config=artifacts_config, - secrets=config.secrets, - typegraph_path=self.typegraph_path, - ) - # hack for allowing tg.serialize(config) to be called more than once - frozen_out = freeze_tg_output(artifacts_config, self.typegraph) + frozen_out = freeze_tg_output(config, self.typegraph) try: - frozen_serialized = frozen_out.serialize(artifacts_config) + frozen_serialized = frozen_out.serialize(config) # noqa except Exception as err: - log.debug(traceback.format_exc()) - log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + Log.debug(traceback.format_exc()) + Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) return - if artifacts_config.codegen: - self.relay_data_to_cli( - initiator=Command.CODEGEN, data=json.loads(frozen_serialized.tgJson) - ) + if config.codegen: + raise Exception("not implemented") try: - ret = tg_deploy(frozen_out, params) - except Exception as err: - log.debug(traceback.format_exc()) - log.failure({"typegraph": self.typegraph.name, "error": str(err)}) - return - - log.debug("deploy result", {"deployResult": ret.typegate}) - log.success({"typegraph": self.typegraph.name}) - - def request_command(self) -> CLIServerResponse: - config = self.request_config() - req = request.Request(f"{self.endpoint}/command") - raw = request.urlopen(req).read().decode() - cli_rep = json.loads(raw)["data"] - return CLIServerResponse(command=Command(cli_rep), config=config) - - def request_config(self) -> CLIConfigRequest: - tg_name = self.typegraph.name - tg_path = parse.quote(self.typegraph_path) - req = request.Request( - f"{self.endpoint}/config?typegraph={tg_name}&typegraph_path={tg_path}" - ) - raw = request.urlopen(req).read().decode() - cli_res = json.loads(raw)["data"] - - prefix = None - if exist_and_not_null(cli_res, "prefix"): - prefix = cli_res["prefix"] - - auth = None - if exist_and_not_null(cli_res["typegate"], "auth"): - raw_auth = 
cli_res["typegate"]["auth"] - auth = BasicAuth(raw_auth["username"], raw_auth["password"]) - - artifact_config_raw = cli_res["artifactsConfig"] - migration_action_raw = artifact_config_raw["prismaMigration"] - - return CLIConfigRequest( - typegate=Typegate(endpoint=cli_res["typegate"]["endpoint"], auth=auth), - prefix=prefix, - secrets=cli_res["secrets"], - artifacts_config=ArtifactResolutionConfig( - dir=artifact_config_raw["dir"], - prefix=prefix, - prisma_migration=MigrationConfig( - global_action=json_to_mig_action( - migration_action_raw["globalAction"] - ), - runtime_actions=[ - (rt, json_to_mig_action(act)) - for [rt, act] in migration_action_raw["runtimeAction"] - ], - migration_dir=artifact_config_raw["prismaMigration"][ - "migrationDir" - ], + params = TypegraphDeployParams( + typegate=TypegateConnectionOptions( + url=typegate.endpoint, + auth=typegate.auth, ), - disable_artifact_resolution=artifact_config_raw[ - "disableArtifactResolution" - ], - codegen=artifact_config_raw["codegen"], - ), - ) - - def relay_data_to_cli(self, initiator: Command, data: Any): - response = { - "command": initiator.value, - "typegraphName": self.typegraph.name, - "typegraphPath": self.typegraph_path, - "data": data, - } - req = request.Request( - url=f"{self.endpoint}/response", - method="POST", - headers={"Content-Type": "application/json"}, - data=json.dumps(response).encode("utf-8"), - ) - request.urlopen(req) - - def relay_error_to_cli(self, initiator: Command, code: str, msg: str, value: Any): - response = { - "command": initiator.value, - "typegraphName": self.typegraph.name, - "typegraphPath": self.typegraph_path, - "error": {"code": code, "msg": msg, "value": value}, - } - req = request.Request( - url=f"{self.endpoint}/response", - method="POST", - headers={"Content-Type": "application/json"}, - data=json.dumps(response).encode("utf-8"), - ) - request.urlopen(req) - - -def exist_and_not_null(obj: dict, field: str): - if field in obj: - return obj[field] is not None - return False + typegraph_path = self.typegraph_path, + prefix=config.prefix, + secrets=self.typegraph_config.secrets, + migrations_dir=self.typegraph_config.migrations_dir, + migration_actions = self.typegraph_config.migration_actions, + default_migration_action=self.typegraph_config.default_migration_action, + ) + ret = tg_deploy(frozen_out, params) + response = ret.response + Log.debug("response", response) -def json_to_mig_action(obj: dict) -> MigrationAction: - return MigrationAction( - create=obj["create"], - reset=obj["reset"], - ) + if not isinstance(response, dict): + raise Exception("unexpected") + Log.success({ "typegraph": self.typegraph.name, **response }) + except Exception as err: + Log.debug(traceback.format_exc()) + Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + return diff --git a/typegraph/python/typegraph/graph/typegraph.py b/typegraph/python/typegraph/graph/typegraph.py index 25ef18d2bf..79098f5af8 100644 --- a/typegraph/python/typegraph/graph/typegraph.py +++ b/typegraph/python/typegraph/graph/typegraph.py @@ -7,16 +7,17 @@ from typing import TYPE_CHECKING, Callable, List, Optional, Union, Any from typegraph.gen.exports.core import ( - ArtifactResolutionConfig, + FinalizeParams, Rate, TypegraphInitParams, ) from typegraph.gen.exports.core import ( Cors as CoreCors, ) +from typegraph.gen.exports.utils import Auth from typegraph.gen.types import Err -from typegraph.graph.params import Auth, Cors, RawAuth +from typegraph.graph.params import Cors, RawAuth from typegraph.graph.shared_types 
import FinalizationResult, TypegraphOutput from typegraph.policy import Policy, PolicyPerEffect, PolicySpec, get_policy_chain from typegraph.wit import core, store, wit_utils @@ -178,12 +179,13 @@ def typegraph( prefix: Optional[str] = None, ) -> Callable[[Callable[[Graph], None]], Callable[[], TypegraphOutput]]: def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: - actual_name = name if name is None: import re # To kebab case actual_name = re.sub("_", "-", builder.__name__) + else: + actual_name = name tg = Typegraph( name=actual_name, @@ -195,6 +197,15 @@ def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: Typegraph._context.append(tg) + default_cors = CoreCors( + allow_credentials=True, + allow_headers=[], + allow_methods=[], + allow_origin=[], + expose_headers=[], + max_age_sec=None, + ) + core.init_typegraph( store, TypegraphInitParams( @@ -202,7 +213,7 @@ def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: dynamic=tg.dynamic, path=tg.path, rate=tg.rate, - cors=tg.cors, + cors=tg.cors or default_cors, prefix=tg.prefix, ), ) @@ -214,7 +225,7 @@ def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: # config is only known at deploy time def serialize_with_artifacts( - config: ArtifactResolutionConfig, + config: FinalizeParams, ): finalization_result = core.finalize_typegraph(store, config) if isinstance(finalization_result, Err): diff --git a/typegraph/python/typegraph/io.py b/typegraph/python/typegraph/io.py new file mode 100644 index 0000000000..b63848f043 --- /dev/null +++ b/typegraph/python/typegraph/io.py @@ -0,0 +1,152 @@ +from typing import Any, Optional, Dict +from fileinput import FileInput +from dataclasses import dataclass +from typegraph.graph.shared_types import BasicAuth +from typegraph.gen.exports.core import MigrationAction +import json + + +_JSON_RPC_VERSION = "2.0" + + +class Log: + + @staticmethod + def __format(*largs: Any): + return " ".join(map(str, largs)) + + @staticmethod + def debug(*largs: Any): + print("debug:", Log.__format(*largs)) + + @staticmethod + def info(*largs: Any): + print("info:", Log.__format(*largs)) + + @staticmethod + def warn(*largs: Any): + print("warning:", Log.__format(*largs)) + + @staticmethod + def error(*largs: Any): + print("error:", Log.__format(*largs)) + + @staticmethod + def failure(data: Any): + print("failure:", json.dumps(data)) + + @staticmethod + def success(data: Any, noencode: bool = False): + if noencode: + print("success:", data) + else: + print("success:", json.dumps(data)) + + +class _RpcResponseReader: + input: FileInput + + def __init__(self): + self.input = FileInput('-') + + def read(self, rpc_id: int): + while True: + line = self.input.readline() + try: + parsed = json.loads(line) + except Exception: + Log.error("rpc response: failed to parse input as json") + continue + + if parsed.get("jsonrpc") != _JSON_RPC_VERSION: + Log.error("rpc response: invalid jsonrpc version") + continue + + if parsed.get("id") != rpc_id: + Log.error(f"rpc response: expected sequential requestests, unexpected rpc id {parsed.get('id')}") + continue + + return parsed.get("result") + +class _RpcCall: + response_reader = _RpcResponseReader() + latest_rpc_id = 0 + + @classmethod + def call(cls, method: str, params: Any): + cls.latest_rpc_id = cls.latest_rpc_id + 1 + rpc_id = cls.latest_rpc_id + rpc_message = json.dumps({ + "jsonrpc": _JSON_RPC_VERSION, + "id": rpc_id, + "method": method, + "params": params + }) + + print(f"jsonrpc: {rpc_message}") + return 
cls.response_reader.read(rpc_id) + + +@dataclass +class TypegateConfig: + endpoint: str + auth: BasicAuth + +@dataclass +class GlobalConfig: + typegate: Optional[TypegateConfig] + prefix: Optional[str] + + +def migration_action_from_dict(raw: Dict[str, bool]) -> MigrationAction: + return MigrationAction( + apply=raw.get("apply", False), + create=raw.get("create", False), + reset=raw.get("reset", False), + ) + + +@dataclass +class TypegraphConfig: + secrets: Dict[str, str] + artifact_resolution: bool + migration_actions: Dict[str, MigrationAction] + default_migration_action: MigrationAction + migrations_dir: str + +class Rpc: + @staticmethod + def get_global_config() -> GlobalConfig: + # TODO validation?? + res = _RpcCall.call("queryGlobalConfig", None) + raw_typegate = res.get("typegate") + typegate = None + if raw_typegate is not None: + raw_auth = raw_typegate.get("auth") + typegate = TypegateConfig( + endpoint=raw_typegate.get("endpoint"), + auth=BasicAuth( + username=raw_auth.get("username"), + password=raw_auth.get("password") + ) + ) + return GlobalConfig( + typegate=typegate, + prefix=res.get("prefix") + ) + + @staticmethod + def get_typegraph_config(typegraph: str): + res = _RpcCall.call("queryTypegraphConfig", { + "typegraph": typegraph + }) + + migration_actions = { k: migration_action_from_dict(v) for k, v in res.get("migrationActions").items() } + + return TypegraphConfig( + secrets=res.get("secrets"), + artifact_resolution=res.get("artifactResolution"), + migration_actions=migration_actions, + default_migration_action=migration_action_from_dict(res.get("defaultMigrationAction")), + migrations_dir=res.get("migrationsDir") + ) diff --git a/typegraph/python/typegraph/log.py b/typegraph/python/typegraph/log.py deleted file mode 100644 index 49b63dce78..0000000000 --- a/typegraph/python/typegraph/log.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Any -import json - - -def __format(*largs: Any): - return " ".join(map(str, largs)) - - -def debug(*largs: Any): - print("debug:", __format(*largs)) - - -def info(*largs: Any): - print("info:", __format(*largs)) - - -def warn(*largs: Any): - print("warning:", __format(*largs)) - - -def error(*largs: Any): - print("error:", __format(*largs)) - - -def failure(data: Any): - print("failure:", json.dumps(data)) - - -def success(data: Any): - print("success:", json.dumps(data)) diff --git a/typegraph/python/typegraph/utils.py b/typegraph/python/typegraph/utils.py index c20bf5ef0c..007dbcd331 100644 --- a/typegraph/python/typegraph/utils.py +++ b/typegraph/python/typegraph/utils.py @@ -5,7 +5,7 @@ from functools import reduce from typing import Any, Dict, List, Optional, Tuple, Union -from typegraph.gen.exports.core import ArtifactResolutionConfig +from typegraph.gen.exports.core import FinalizeParams from typegraph.gen.exports.utils import ReducePath, ReduceValue from typegraph.graph.shared_types import FinalizationResult, TypegraphOutput from typegraph.injection import InheritDef, serialize_static_injection @@ -84,7 +84,7 @@ def unpack_tarb64(tar_b64: str, dest: str): def freeze_tg_output( - config: ArtifactResolutionConfig, tg_output: TypegraphOutput + config: FinalizeParams, tg_output: TypegraphOutput ) -> TypegraphOutput: if tg_output.name not in frozen_memo: frozen_memo[tg_output.name] = tg_output.serialize(config) diff --git a/typegraph/python/typegraph/wit.py b/typegraph/python/typegraph/wit.py index 800af280ce..ba852d23ae 100644 --- a/typegraph/python/typegraph/wit.py +++ b/typegraph/python/typegraph/wit.py @@ -12,8 +12,8 @@ # 
Make sure the imports are similar to the node implementation from typegraph.gen.exports.core import ( - ArtifactResolutionConfig, # noqa - MigrationConfig, # noqa + FinalizeParams, # noqa + PrismaMigrationConfig, # noqa MigrationAction, # noqa ) From 08770af420a8f64e5ef1ec28a2927e65777a85be Mon Sep 17 00:00:00 2001 From: Natoandro Date: Fri, 7 Jun 2024 14:27:07 +0300 Subject: [PATCH 12/35] fix python typegraph deployment, oauth2 example --- examples/metatype.yaml | 2 +- examples/typegraphs/files-upload.py | 10 ++-- examples/typegraphs/files-upload.ts | 49 +++++++++++-------- typegraph/python/typegraph/graph/tg_deploy.py | 2 +- 4 files changed, 36 insertions(+), 27 deletions(-) diff --git a/examples/metatype.yaml b/examples/metatype.yaml index 25558bb2da..730b857b7c 100644 --- a/examples/metatype.yaml +++ b/examples/metatype.yaml @@ -82,7 +82,7 @@ typegates: BASIC_admin: "password" jwt-authentication: CUSTOM_JWT: "wOoyc8ijEHP99oASvJ0IXmHDOMYQH6" - oauth2_authentication: + oauth2-authentication: GITHUB_CLIENT_ID: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/GITHUB_CLIENT_ID GITHUB_CLIENT_SECRET: infisical://app.infisical.com/643057c5bb17b13ef6e73d99/prod/GITHUB_CLIENT_SECRET authentication: diff --git a/examples/typegraphs/files-upload.py b/examples/typegraphs/files-upload.py index 4255f3e20d..d61ebc3367 100644 --- a/examples/typegraphs/files-upload.py +++ b/examples/typegraphs/files-upload.py @@ -24,9 +24,9 @@ def files_upload(g: Graph): g.expose( Policy.public(), # we can then generate helpers for interacting with our runtime - listObjects=s3.list("bucket"), - getDownloadUrl=s3.presign_get("bucket"), - signUploadUrl=s3.presign_put("bucket"), - upload=s3.upload("bucket", t.file(allow=["image/png", "image/jpeg"])), - uploadMany=s3.upload_all("bucket"), + listObjects=s3.list("examples"), + getDownloadUrl=s3.presign_get("examples"), + signUploadUrl=s3.presign_put("examples"), + upload=s3.upload("examples", t.file(allow=["image/png", "image/jpeg"])), + uploadMany=s3.upload_all("examples"), ) diff --git a/examples/typegraphs/files-upload.ts b/examples/typegraphs/files-upload.ts index a7419373e2..b1caf95ae4 100644 --- a/examples/typegraphs/files-upload.ts +++ b/examples/typegraphs/files-upload.ts @@ -1,24 +1,33 @@ import { Policy, t, typegraph } from "@typegraph/sdk/index.js"; import { S3Runtime } from "@typegraph/sdk/providers/aws.js"; -await typegraph({ - name: "files-upload", - // skip:next-line - cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, -}, (g) => { - const s3 = new S3Runtime({ - hostSecret: "S3_HOST", - regionSecret: "S3_REGION", - accessKeySecret: "S3_ACCESS_KEY", - secretKeySecret: "S3_SECRET_KEY", - pathStyleSecret: "S3_PATH_STYLE", - }); +await typegraph( + { + name: "files-upload", + // skip:next-line + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + const s3 = new S3Runtime({ + hostSecret: "S3_HOST", + regionSecret: "S3_REGION", + accessKeySecret: "S3_ACCESS_KEY", + secretKeySecret: "S3_SECRET_KEY", + pathStyleSecret: "S3_PATH_STYLE", + }); - g.expose({ - listObjects: s3.list("bucket"), - getDownloadUrl: s3.presignGet({ bucket: "bucket" }), - signUploadUrl: s3.presignPut({ bucket: "bucket" }), - upload: s3.upload("bucket", t.file({ allow: ["image/png", "image/jpeg"] })), - uploadMany: s3.uploadAll("bucket"), - }, Policy.public()); -}); + g.expose( + { + listObjects: s3.list("examples"), + getDownloadUrl: s3.presignGet({ bucket: "examples" }), + signUploadUrl: s3.presignPut({ bucket: "examples" 
}), + upload: s3.upload( + "examples", + t.file({ allow: ["image/png", "image/jpeg"] }), + ), + uploadMany: s3.uploadAll("examples"), + }, + Policy.public(), + ); + }, +); diff --git a/typegraph/python/typegraph/graph/tg_deploy.py b/typegraph/python/typegraph/graph/tg_deploy.py index 2ea4d9b1a7..c579db492a 100644 --- a/typegraph/python/typegraph/graph/tg_deploy.py +++ b/typegraph/python/typegraph/graph/tg_deploy.py @@ -61,7 +61,7 @@ def tg_deploy(tg: TypegraphOutput, params: TypegraphDeployParams) -> DeployResul sep = "/" if not typegate.url.endswith("/") else "" url = typegate.url + sep + "typegate" - headers = {"Content-Type": "application/json", "User-Agent": f"Mozilla/5.0 TypegraphSdk/{sdk_version} Python/{python_version()}"} + headers = {"Content-Type": "application/json", "User-Agent": f"TypegraphSdk/{sdk_version} Python/{python_version()}"} if typegate.auth is not None: headers["Authorization"] = typegate.auth.as_header_value() From 5650b06398878e0f64d56d1c05e03098dbbd3ce1 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Sat, 8 Jun 2024 08:22:30 +0300 Subject: [PATCH 13/35] custom rpc handlers --- Cargo.lock | 143 ++--- examples/metatype.yaml | 2 +- libs/common/src/node.rs | 7 +- libs/metagen/src/lib.rs | 4 +- meta-cli/Cargo.toml | 24 +- meta-cli/src/cli/deploy.rs | 65 +-- meta-cli/src/cli/doctor.rs | 3 +- meta-cli/src/cli/gen.rs | 58 +- meta-cli/src/cli/serialize.rs | 86 +-- meta-cli/src/config.rs | 27 +- meta-cli/src/deploy/actors/discovery.rs | 6 +- meta-cli/src/deploy/actors/loader.rs | 260 --------- meta-cli/src/deploy/actors/mod.rs | 1 - meta-cli/src/deploy/actors/task.rs | 160 +++--- meta-cli/src/deploy/actors/task/action.rs | 42 +- meta-cli/src/deploy/actors/task/command.rs | 111 +++- .../src/deploy/actors/task/command/python.rs | 26 + .../deploy/actors/task/command/typescript.rs | 141 +++++ meta-cli/src/deploy/actors/task/deploy.rs | 192 ++++--- .../deploy/actors/task/deploy/migrations.rs | 27 +- meta-cli/src/deploy/actors/task/serialize.rs | 91 ++-- .../actors/task_manager/retry_manager.rs | 2 + meta-cli/src/deploy/actors/watcher.rs | 7 +- .../src/deploy/push/migration_resolution.rs | 89 ++- meta-cli/src/deploy/push/pusher.rs | 19 +- meta-cli/src/typegraph/loader/discovery.rs | 122 +---- meta-cli/src/typegraph/loader/mod.rs | 510 ------------------ typegate/tests/e2e/cli/dev_test.ts | 331 ++++++------ typegraph/core/src/utils/metagen_utils.rs | 2 +- typegraph/node/sdk/src/tg_artifact_upload.ts | 3 +- typegraph/python/typegraph/deploy/request.py | 32 ++ typegraph/python/typegraph/envs/cli.py | 89 +++ typegraph/python/typegraph/graph/tg_manage.py | 137 ++--- typegraph/python/typegraph/graph/typegraph.py | 20 +- typegraph/python/typegraph/io.py | 101 ++-- 35 files changed, 1190 insertions(+), 1750 deletions(-) delete mode 100644 meta-cli/src/deploy/actors/loader.rs create mode 100644 meta-cli/src/deploy/actors/task/command/python.rs create mode 100644 meta-cli/src/deploy/actors/task/command/typescript.rs create mode 100644 meta-cli/src/deploy/actors/task_manager/retry_manager.rs create mode 100644 typegraph/python/typegraph/deploy/request.py create mode 100644 typegraph/python/typegraph/envs/cli.py diff --git a/Cargo.lock b/Cargo.lock index 418f178134..5464789f3a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -950,12 +950,6 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" -[[package]] -name = "base64" -version = "0.20.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea22880d78093b0cbe17c89f64a7d457941e65759157ec6cb31a31d652b05e5" - [[package]] name = "base64" version = "0.21.7" @@ -1331,6 +1325,42 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bf2a5fb3207c12b5d208ebc145f967fea5cac41a021c37417ccc31ba40f39ee" +[[package]] +name = "cached" +version = "0.51.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd93a9f06ec296ca66b4c26fafa9ed63f32c473d7a708a5f28563ee64c948515" +dependencies = [ + "ahash 0.8.11", + "async-trait", + "cached_proc_macro", + "cached_proc_macro_types", + "futures", + "hashbrown 0.14.5", + "instant", + "once_cell", + "thiserror", + "tokio", +] + +[[package]] +name = "cached_proc_macro" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771aa57f3b17da6c8bcacb187bb9ec9bc81c8160e72342e67c329e0e1651a669" +dependencies = [ + "darling 0.20.9", + "proc-macro2", + "quote", + "syn 2.0.65", +] + +[[package]] +name = "cached_proc_macro_types" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" + [[package]] name = "cap-fs-ext" version = "3.1.0" @@ -3801,7 +3831,7 @@ checksum = "64fba5a42bd76a17cad4bfa00de168ee1cbfa06a5e8ce992ae880218c05641a9" dependencies = [ "byteorder", "dynasm", - "memmap2 0.5.10", + "memmap2", ] [[package]] @@ -3939,15 +3969,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "encoding_rs_io" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cc3c5651fb62ab8aa3103998dade57efdd028544bd300516baa31840c252a83" -dependencies = [ - "encoding_rs", -] - [[package]] name = "endian-type" version = "0.1.2" @@ -4947,85 +4968,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "grep" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd79f01019ef2fe3978232135f5a7237baca9a6c6ed4dbbe9e5a51234e2306c5" -dependencies = [ - "grep-cli", - "grep-matcher", - "grep-printer", - "grep-regex", - "grep-searcher", -] - -[[package]] -name = "grep-cli" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea40788c059ab8b622c4d074732750bfb3bd2912e2dd58eabc11798a4d5ad725" -dependencies = [ - "bstr", - "globset", - "libc", - "log", - "termcolor", - "winapi-util", -] - -[[package]] -name = "grep-matcher" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47a3141a10a43acfedc7c98a60a834d7ba00dfe7bec9071cbfc19b55b292ac02" -dependencies = [ - "memchr", -] - -[[package]] -name = "grep-printer" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14551578f49da1f774b70da5bd1b8c20bbbead01620c426cb0a217536d95a6a" -dependencies = [ - "base64 0.20.0", - "bstr", - "grep-matcher", - "grep-searcher", - "serde 1.0.202", - "serde_json", - "termcolor", -] - -[[package]] -name = "grep-regex" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f748bb135ca835da5cbc67ca0e6955f968db9c5df74ca4f56b18e1ddbc68230d" -dependencies = [ - "bstr", - "grep-matcher", - "log", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", -] - -[[package]] -name = "grep-searcher" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ba536ae4f69bec62d8839584dd3153d3028ef31bb229f04e09fb5a9e5a193c54" -dependencies = [ - "bstr", - "encoding_rs", - "encoding_rs_io", - "grep-matcher", - "log", - "memchr", - "memmap2 0.9.4", -] - [[package]] name = "group" version = "0.13.0" @@ -6479,15 +6421,6 @@ dependencies = [ "libc", ] -[[package]] -name = "memmap2" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" -dependencies = [ - "libc", -] - [[package]] name = "memmem" version = "0.1.1" @@ -6522,6 +6455,7 @@ dependencies = [ "async-recursion", "async-trait", "base64 0.21.7", + "cached", "chrono", "clap", "clap-verbosity-flag", @@ -6539,7 +6473,6 @@ dependencies = [ "futures", "git2", "globset", - "grep", "ignore", "include_dir", "indexmap 2.2.6", diff --git a/examples/metatype.yaml b/examples/metatype.yaml index 730b857b7c..73fc8acf59 100644 --- a/examples/metatype.yaml +++ b/examples/metatype.yaml @@ -151,7 +151,7 @@ typegraphs: exclude: - "**/*" - "typegraphs/temporal.py" - deno: + typescript: exclude: - "typegraphs/temporal.ts" diff --git a/libs/common/src/node.rs b/libs/common/src/node.rs index 89521e00ac..359b68d10a 100644 --- a/libs/common/src/node.rs +++ b/libs/common/src/node.rs @@ -18,7 +18,8 @@ pub struct BasicAuth { pub password: String, } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] pub struct Node { pub base_url: Url, pub prefix: Option, @@ -116,7 +117,7 @@ impl Node { Ok(()) } - pub async fn typegraph(&self, name: &str) -> Result, Error> { + pub async fn typegraph(&self, name: &str) -> Result>, Error> { let res = self .post("/typegate") .map_err(Error::Other)? @@ -144,7 +145,7 @@ impl Node { return Ok(None); }; serde_json::from_str::(&res.serialized) - .map(Some) + .map(|tg| Some(Box::new(tg))) .map_err(|err| Error::Other(anyhow::anyhow!(err))) } } diff --git a/libs/metagen/src/lib.rs b/libs/metagen/src/lib.rs index 28c12dac3f..76d69cc17d 100644 --- a/libs/metagen/src/lib.rs +++ b/libs/metagen/src/lib.rs @@ -50,8 +50,8 @@ pub enum GeneratorInputOrder { /// by [GeneratorInputOrder]. 
#[derive(Debug)] pub enum GeneratorInputResolved { - TypegraphFromTypegate { raw: Typegraph }, - TypegraphFromPath { raw: Typegraph }, + TypegraphFromTypegate { raw: Box }, + TypegraphFromPath { raw: Box }, } /// This type plays the "dispatcher" role to the command object diff --git a/meta-cli/Cargo.toml b/meta-cli/Cargo.toml index 146fcc5f7c..613b3cb7d6 100644 --- a/meta-cli/Cargo.toml +++ b/meta-cli/Cargo.toml @@ -12,11 +12,11 @@ repository = "https://github.com/metatypedev/metatype" include = ["src"] keywords = ["api", "composition", "typesystem", "graphql", "ecosystem"] categories = [ - "accessibility", - "api-bindings", - "data-structures", - "development-tools", - "wasm", + "accessibility", + "api-bindings", + "data-structures", + "development-tools", + "wasm", ] build = "build.rs" @@ -48,6 +48,7 @@ itertools = "0.11.0" enum_dispatch = "0.3.12" derive_more = "0.99.17" indoc.workspace = true +cached = { version = "0.51.3", features = ["proc_macro", "async"] } # concurrency tokio = { workspace = true, features = ["full"] } @@ -65,11 +66,11 @@ ctrlc = "3.4.1" question = "0.2.2" dialoguer = "0.11.0" self_update = { version = "0.38.0", features = [ - "archive-tar", - "archive-zip", - "compression-flate2", - "compression-zip-deflate", - "compression-zip-bzip2", + "archive-tar", + "archive-zip", + "compression-flate2", + "compression-zip-deflate", + "compression-zip-bzip2", ] } # codecs @@ -85,7 +86,6 @@ serde_yaml = "0.9.27" normpath = "1.1.1" directories = "5.0.1" include_dir = "0.7.3" -grep = "0.2.12" filetime = "0.2" notify-debouncer-mini = { version = "0.4.1", default-features = false } ignore = "0.4.20" @@ -112,7 +112,7 @@ prisma-models = { git = "https://github.com/prisma/prisma-engines", tag = "5.5.2 nix = { version = "0.28", features = ["signal"] } lade-sdk = "0.9.1" git2 = { version = "0.18.1", features = [ - "vendored-libgit2", + "vendored-libgit2", ], default-features = false } process-wrap = { version = "8.0", features = ["tokio1"] } diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 018dabde99..cf7a6cd76e 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -2,7 +2,6 @@ // SPDX-License-Identifier: MPL-2.0 use self::actors::task::deploy::{DeployAction, DeployActionGenerator}; -use self::actors::task::TaskConfig; use self::actors::task_manager::{self, StopReason}; use super::{Action, ConfigArgs, NodeArgs}; use crate::com::store::{Command, Endpoint, ServerStore}; @@ -221,8 +220,6 @@ mod default_mode { use crate::config::PathOption; - use self::actors::task::deploy::MigrationAction; - use super::*; pub async fn run(deploy: Deploy) -> Result { @@ -231,21 +228,19 @@ mod default_mode { let mut secrets = deploy.secrets.clone(); secrets.apply_overrides(&deploy.options.secrets)?; - let task_config = TaskConfig::init(deploy.base_dir.clone()); - let action_generator = DeployActionGenerator { - task_config: task_config.into(), - node: deploy.node.clone().into(), - secrets: secrets.hydrate(deploy.base_dir.clone()).await?.into(), - migrations_dir: deploy + let action_generator = DeployActionGenerator::new( + deploy.node.into(), + // TODO no hydrate here + secrets.hydrate(deploy.base_dir.clone()).await?.into(), + deploy.config.dir().unwrap_or_log().into(), + deploy.base_dir.clone(), + deploy .config .prisma_migrations_base_dir(PathOption::Absolute) .into(), - default_migration_action: MigrationAction { - apply: true, - create: deploy.options.create_migration, - reset: deploy.options.allow_destructive, - }, - }; + deploy.options.create_migration, 
+ deploy.options.allow_destructive, + ); let mut init = TaskManagerInit::::new( deploy.config.clone(), @@ -299,8 +294,6 @@ mod watch_mode { use crate::config::PathOption; - use self::actors::task::deploy::MigrationAction; - use super::*; #[tracing::instrument] @@ -311,40 +304,22 @@ mod watch_mode { let console = ConsoleActor::new(Arc::clone(&deploy.config)).start(); - let ctrlc_handler_data = Arc::new(std::sync::Mutex::new(None)); - - let data = ctrlc_handler_data.clone(); - ctrlc::set_handler(move || { - let mut data = data.lock().unwrap(); - if let Some(CtrlCHandlerData { - watcher, - task_manager, - }) = data.take() - { - watcher.do_send(watcher::message::Stop); - task_manager.do_send(task_manager::message::Stop); - } - }) - .context("setting Ctrl-C handler")?; - - let task_config = TaskConfig::init(deploy.base_dir.clone()); let mut secrets = deploy.secrets.clone(); secrets.apply_overrides(&deploy.options.secrets)?; - let action_generator = DeployActionGenerator { - task_config: task_config.into(), - node: deploy.node.into(), - secrets: secrets.hydrate(deploy.base_dir.clone()).await?.into(), - migrations_dir: deploy + let action_generator = DeployActionGenerator::new( + deploy.node.into(), + // TODO no hydrate here + secrets.hydrate(deploy.base_dir.clone()).await?.into(), + deploy.config.dir().unwrap_or_log().into(), + deploy.base_dir.clone(), + deploy .config .prisma_migrations_base_dir(PathOption::Absolute) .into(), - default_migration_action: MigrationAction { - apply: true, - create: deploy.options.create_migration, - reset: deploy.options.allow_destructive, - }, - }; + deploy.options.create_migration, + deploy.options.allow_destructive, + ); // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); diff --git a/meta-cli/src/cli/doctor.rs b/meta-cli/src/cli/doctor.rs index 1fcee239c6..30a24f6ac4 100644 --- a/meta-cli/src/cli/doctor.rs +++ b/meta-cli/src/cli/doctor.rs @@ -1,7 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; +use crate::{interlude::*, typegraph::loader::discovery::Discovery}; use super::{Action, ConfigArgs}; use crate::{ @@ -9,7 +9,6 @@ use crate::{ config::{Config, PIPFILE_FILES, PYPROJECT_FILES, REQUIREMENTS_FILES, VENV_FOLDERS}, fs::{clean_path, find_in_parents}, global_config::GlobalConfig, - typegraph::loader::Discovery, }; use actix_web::dev::ServerHandle; diff --git a/meta-cli/src/cli/gen.rs b/meta-cli/src/cli/gen.rs index d85a5e9626..1681931430 100644 --- a/meta-cli/src/cli/gen.rs +++ b/meta-cli/src/cli/gen.rs @@ -1,14 +1,12 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - use crate::cli::{Action, ConfigArgs, NodeArgs}; -use crate::{ - com::store::ServerStore, - config::Config, - deploy::actors::{console::ConsoleActor, loader::*}, -}; +use crate::config::PathOption; +use crate::deploy::actors::task::serialize::{SerializeAction, SerializeActionGenerator}; +use crate::deploy::actors::task_manager::{TaskManagerInit, TaskSource}; +use crate::interlude::*; +use crate::{com::store::ServerStore, config::Config, deploy::actors::console::ConsoleActor}; use actix::Actor; use actix_web::dev::ServerHandle; use clap::{Parser, ValueEnum}; @@ -16,6 +14,8 @@ use common::typegraph::Typegraph; use metagen::*; use serde_json::json; +use super::serialize::SerializeReportExt; + #[derive(ValueEnum, Debug, Clone)] enum GeneratorOp { /// missing module dependencies @@ -178,7 +178,7 @@ async fn load_tg_at( config: Arc, path: PathBuf, name: Option<&str>, -) -> anyhow::Result { +) -> anyhow::Result> { ServerStore::with( Some(crate::com::store::Command::Serialize), Some(config.as_ref().clone()), @@ -188,32 +188,24 @@ async fn load_tg_at( let console = ConsoleActor::new(Arc::clone(&config)).start(); - let (loader_event_tx, loader_event_rx) = tokio::sync::mpsc::unbounded_channel(); - - let loader = LoaderActor::new(Arc::clone(&config), console.clone(), loader_event_tx, 1) - .auto_stop() - .start(); - - let path = Arc::new(path); + let config_dir: Arc = config.dir().unwrap_or_log().into(); + let init = TaskManagerInit::::new( + config.clone(), + SerializeActionGenerator::new( + config_dir.clone(), + config_dir, // TODO cwd + config + .prisma_migrations_base_dir(PathOption::Absolute) + .into(), + ), + console, + TaskSource::Static(vec![path.clone()]), + ) + .max_parallel_tasks(1); + + let report = init.run().await; + let mut tgs = report.into_typegraphs(); - loader.do_send(LoadModule(path.clone())); - let mut tgs: Vec = vec![]; - let mut event_rx = loader_event_rx; - while let Some(event) = event_rx.recv().await { - match event { - LoaderEvent::Typegraph(tg_infos) => { - let responses = ServerStore::get_responses_or_fail(&tg_infos.path)?; - for (_, tg) in responses.iter() { - tgs.push(tg.as_typegraph()?); - } - } - LoaderEvent::Stopped(res) => { - if let StopBehavior::ExitFailure(err) = res { - bail!("LoaderActor exit failure: {err}"); - } - } - } - } if tgs.is_empty() { bail!("not typegraphs loaded from path at {path:?}") } diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index 76ac2c986f..140edc5e4a 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -1,18 +1,18 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 - use super::{Action, ConfigArgs}; use crate::com::store::{Command, ServerStore}; -use crate::config::Config; +use crate::config::{Config, PathOption}; use crate::deploy::actors::console::ConsoleActor; use crate::deploy::actors::task::serialize::{ SerializeAction, SerializeActionGenerator, SerializeError, }; -use crate::deploy::actors::task::{TaskConfig, TaskFinishStatus}; -use crate::deploy::actors::task_manager::{TaskManagerInit, TaskSource}; +use crate::deploy::actors::task::TaskFinishStatus; +use crate::deploy::actors::task_manager::{Report, TaskManagerInit, TaskSource}; use crate::interlude::*; use actix_web::dev::ServerHandle; use clap::Parser; +use common::typegraph::Typegraph; use core::fmt::Debug; use std::io::{self, Write}; use tokio::io::AsyncWriteExt; @@ -57,13 +57,7 @@ impl Action for Serialize { let dir = args.dir(); let config_path = args.config.clone(); - // config file is not used when `TypeGraph` files - // are provided in the CLI by flags - let config = if !self.files.is_empty() { - Config::default_in(&dir) - } else { - Config::load_or_find(config_path, &dir)? - }; + let config = Config::load_or_find(config_path, &dir)?; // Minimum setup ServerStore::with(Some(Command::Serialize), Some(config.to_owned())); @@ -73,7 +67,13 @@ impl Action for Serialize { let console = ConsoleActor::new(Arc::clone(&config)).start(); - let action_generator = SerializeActionGenerator::new(TaskConfig::init(args.dir().into())); + let action_generator = SerializeActionGenerator::new( + config.dir().unwrap_or_log().into(), + dir.into(), + config + .prisma_migrations_base_dir(PathOption::Absolute) + .into(), + ); if self.files.is_empty() { bail!("no file provided"); @@ -93,32 +93,7 @@ impl Action for Serialize { let report = init.run().await; // TODO no need to report errors - let tgs = report - .entries - .into_iter() - .map(|entry| match entry.status { - TaskFinishStatus::Finished(results) => results - .into_iter() - .collect::, SerializeError>>() - .map_err(|e| { - eyre::eyre!( - "serialization failed for typegraph '{}' at {:?}: {}", - e.typegraph, - entry.path, - e.error - ) - }), - TaskFinishStatus::Cancelled => { - Err(eyre::eyre!("serialization cancelled for {:?}", entry.path)) - } - TaskFinishStatus::Error => { - Err(eyre::eyre!("serialization failed for {:?}", entry.path)) - } - }) - .collect::>>()? 
- .into_iter() - .flatten() - .collect::>(); + let tgs = report.into_typegraphs(); if let Some(tg_name) = self.typegraph.as_ref() { if let Some(tg) = tgs.iter().find(|tg| &tg.name().unwrap() == tg_name) { @@ -147,6 +122,41 @@ impl Action for Serialize { } } +pub trait SerializeReportExt { + fn into_typegraphs(self) -> Vec>; +} + +impl SerializeReportExt for Report { + fn into_typegraphs(self) -> Vec> { + self.entries + .into_iter() + .map(|entry| match entry.status { + TaskFinishStatus::Finished(results) => results + .into_iter() + .collect::, SerializeError>>() + .unwrap_or_else(|e| { + tracing::error!( + "serialization failed for typegraph '{}' at {:?}: {}", + e.typegraph, + entry.path, + e.error + ); + vec![] + }), + TaskFinishStatus::Cancelled => { + tracing::error!("serialization cancelled for {:?}", entry.path); + vec![] + } + TaskFinishStatus::Error => { + tracing::error!("serialization failed for {:?}", entry.path); + vec![] + } + }) + .flatten() + .collect() + } +} + impl Serialize { async fn write(&self, contents: &str) -> Result<()> { if let Some(path) = self.out.as_ref() { diff --git a/meta-cli/src/config.rs b/meta-cli/src/config.rs index 29d6263920..d31e44a2ab 100644 --- a/meta-cli/src/config.rs +++ b/meta-cli/src/config.rs @@ -3,6 +3,10 @@ use crate::interlude::*; +use crate::cli::NodeArgs; +use crate::fs::find_in_parents; +use crate::utils::BasicAuth; +use common::node::Node; use globset::{Glob, GlobSet, GlobSetBuilder}; use lazy_static::lazy_static; use reqwest::Url; @@ -11,11 +15,6 @@ use std::io; use std::slice; use std::str::FromStr; -use crate::cli::NodeArgs; -use crate::fs::find_in_parents; -use crate::utils::BasicAuth; -use common::node::Node; - pub const METATYPE_FILES: &[&str] = &["metatype.yml", "metatype.yaml"]; pub const VENV_FOLDERS: &[&str] = &[".venv"]; pub const PYPROJECT_FILES: &[&str] = &["pyproject.toml"]; @@ -158,10 +157,11 @@ pub struct Materializers { } #[derive(Deserialize, Debug, Clone, Copy, Hash, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] +#[serde(rename_all = "lowercase")] pub enum ModuleType { Python, - Deno, + TypeScript, + JavaScript, } #[derive(Deserialize, Debug, Default, Clone)] @@ -214,7 +214,8 @@ impl Config { })?; let mut config: serde_yaml::Value = serde_yaml::from_reader(file)?; config.apply_merge()?; - let mut config: Self = serde_yaml::from_value(config)?; + let mut config: Self = + serde_yaml::from_value(config).context("could not parse metatype config file")?; config.path = Some(path.clone()); config.base_dir = { let mut path = path; @@ -276,7 +277,7 @@ impl Config { } } - /// `config migration dir` + `runtime` + `tg_name` + /// `config migration dir` + `runtime` + `tg_name` pub fn prisma_migrations_dir_rel(&self, tg_name: &str) -> PathBuf { let mut path = self .typegraphs @@ -296,6 +297,14 @@ impl Config { path.push(self.prisma_migrations_dir_rel(tg_name)); path } + + pub fn dir(&self) -> Result<&Path> { + self.path + .as_deref() + .ok_or_else(|| ferr!("config path required"))? 
+ .parent() + .ok_or_else(|| ferr!("config path has no parent")) + } } #[cfg(test)] diff --git a/meta-cli/src/deploy/actors/discovery.rs b/meta-cli/src/deploy/actors/discovery.rs index 78ef505480..a90235315d 100644 --- a/meta-cli/src/deploy/actors/discovery.rs +++ b/meta-cli/src/deploy/actors/discovery.rs @@ -6,7 +6,7 @@ use crate::interlude::*; use pathdiff::diff_paths; -use crate::{config::Config, typegraph::loader::Discovery}; +use crate::{config::Config, typegraph::loader::discovery::Discovery}; use super::console::{Console, ConsoleActor}; use super::task::action::TaskAction; @@ -48,7 +48,6 @@ impl Actor for DiscoveryActor { #[tracing::instrument(skip(self))] fn started(&mut self, ctx: &mut Self::Context) { log::trace!("DiscoveryActor started; directory={:?}", self.directory); - let config = Arc::clone(&self.config); let dir = self.directory.clone(); let task_manager = self.task_manager.clone(); @@ -56,6 +55,9 @@ impl Actor for DiscoveryActor { let discovery = ctx.address(); let task_generator = self.task_generator.clone(); + console.info("starting discovery".to_string()); + console.warning("make sure to exclude all non-typegraph Python and TypeScript/JavaScript files in the metatype.yaml config file using the include/exclude patterns".to_string()); + let fut = async move { match Discovery::new(config, dir.to_path_buf()) .start(|path| match path { diff --git a/meta-cli/src/deploy/actors/loader.rs b/meta-cli/src/deploy/actors/loader.rs deleted file mode 100644 index 6f27d70e74..0000000000 --- a/meta-cli/src/deploy/actors/loader.rs +++ /dev/null @@ -1,260 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - -use std::sync::atomic::{AtomicU32, Ordering}; - -use actix::prelude::Context; -use actix::prelude::*; -use tokio::sync::{mpsc, oneshot}; - -use crate::config::Config; -use crate::typegraph::loader::{LoaderPool, TypegraphInfos}; -use crate::typegraph::postprocess::DenoModules; - -use super::console::{Console, ConsoleActor}; - -#[derive(Debug, Clone)] -pub struct PostProcessOptions { - pub deno: Option, -} - -impl Default for PostProcessOptions { - fn default() -> Self { - Self { - deno: Some(DenoModules::default()), - } - } -} - -impl PostProcessOptions { - pub fn no_deno(mut self) -> Self { - self.deno = None; - self - } - - pub fn deno_codegen(mut self, codegen: bool) -> Self { - self.deno = Some(DenoModules::default().codegen(codegen)); - self - } -} - -#[derive(Clone, Debug)] -pub enum StopBehavior { - ExitSuccess, - ExitFailure(String), - Restart, -} - -#[derive(Clone, Debug)] -pub enum LoaderEvent { - Typegraph(Box), - Stopped(StopBehavior), -} - -pub struct LoaderActor { - // config: Arc, - console: Addr, - stopped_tx: Option>, - stop_behavior: StopBehavior, - event_tx: mpsc::UnboundedSender, - counter: Option>, - loader_pool: Arc, -} - -impl LoaderActor { - pub fn new( - config: Arc, - console: Addr, - event_tx: mpsc::UnboundedSender, - max_parallel_loads: usize, - ) -> Self { - let loader_pool = Self::loader_pool(config.clone(), max_parallel_loads); - Self { - // config, - console, - stopped_tx: None, - stop_behavior: StopBehavior::ExitSuccess, - event_tx, - counter: None, - loader_pool: Arc::new(loader_pool), - } - } - - pub fn auto_stop(self) -> Self { - Self { - counter: Some(Arc::new(AtomicU32::new(0))), - ..self - } - } -} - -impl LoaderActor { - fn loader_pool(config: Arc, max_parallel_loads: usize) -> LoaderPool { - LoaderPool::new(config.base_dir.clone(), 
max_parallel_loads) - } - - #[tracing::instrument(skip(self))] - fn load_module(&self, self_addr: Addr, path: Arc) { - let loader_pool = self.loader_pool.clone(); - let console = self.console.clone(); - let counter = self.counter.clone(); - let fut = async move { - // TODO error handling? - let loader = loader_pool.get_loader().await.unwrap_or_log(); - match loader.load_module(path.clone()).await { - Ok(tgs_infos) => { - self_addr.do_send(LoadedModule(path.as_ref().to_owned().into(), tgs_infos)) - } - Err(err) => { - if counter.is_some() { - // auto stop - self_addr.do_send(TryStop(StopBehavior::ExitFailure(err.to_string()))); - } else { - console.error(format!("Loader error: {err:#}")); - } - } - } - }; - Arbiter::current().spawn(fut.in_current_span()); - } -} - -pub enum ReloadReason { - FileChanged, - FileCreated, - DependencyChanged(PathBuf), -} - -#[derive(Message)] -#[rtype(result = "()")] -pub struct LoadModule(pub Arc); - -#[derive(Message)] -#[rtype(result = "()")] -pub struct ReloadModule(pub Arc, pub ReloadReason); - -#[derive(Message)] -#[rtype(result = "()")] -pub struct TryStop(pub StopBehavior); - -#[derive(Message)] -#[rtype(result = "()")] -struct SetStoppedTx(oneshot::Sender); - -#[derive(Message)] -#[rtype(result = "()")] -struct LoadedModule(pub Arc, TypegraphInfos); - -impl Actor for LoaderActor { - type Context = Context; - - #[cfg(debug_assertions)] - fn started(&mut self, _ctx: &mut Self::Context) { - log::trace!("LoaderActor started"); - } - - fn stopped(&mut self, _ctx: &mut Self::Context) { - if let Some(tx) = self.stopped_tx.take() { - if let Err(err) = tx.send(self.stop_behavior.clone()) { - self.console - .warning(format!("failed to send stop signal: {err:?}")); - } - } - if let Err(err) = self - .event_tx - .send(LoaderEvent::Stopped(self.stop_behavior.clone())) - { - self.console - .warning(format!("failed to send stop event: {err}")); - } - log::trace!("LoaderActor stopped"); - } -} - -impl Handler for LoaderActor { - type Result = (); - - fn handle(&mut self, msg: LoadModule, ctx: &mut Context) -> Self::Result { - self.console.info(format!("Loading module {:?}", msg.0)); - - if let Some(counter) = self.counter.as_ref() { - counter.fetch_add(1, Ordering::SeqCst); - } - - self.load_module(ctx.address(), msg.0); - } -} - -impl Handler for LoaderActor { - type Result = (); - - fn handle(&mut self, msg: ReloadModule, ctx: &mut Context) -> Self::Result { - let reason = match msg.1 { - ReloadReason::FileChanged => "file changed".to_string(), - ReloadReason::FileCreated => "file created".to_string(), - ReloadReason::DependencyChanged(path) => format!("dependency changed: {:?}", path), - }; - self.console - .info(format!("Reloading module {:?}: {reason}", msg.0)); - if let Some(counter) = self.counter.as_ref() { - counter.fetch_add(1, Ordering::SeqCst); - } - - self.load_module(ctx.address(), msg.0); - } -} - -impl Handler for LoaderActor { - type Result = (); - - fn handle(&mut self, msg: LoadedModule, ctx: &mut Context) -> Self::Result { - let LoadedModule(path, tg_infos) = msg; - - if let Err(e) = self - .event_tx - .send(LoaderEvent::Typegraph(Box::new(tg_infos))) - { - self.console - .error(format!("failed to send typegraph: {:?}", e)); - if self.counter.is_some() { - // auto stop - ctx.stop(); - return; - } - } - - self.console.debug(format!("Loaded 1 file from {path:?}")); - if let Some(counter) = self.counter.as_ref() { - let count = counter.fetch_sub(1, Ordering::SeqCst); - if count == 1 { - self.console - .debug("All modules have been loaded. 
Stopping the loader.".to_string()); - ctx.notify(TryStop(StopBehavior::ExitSuccess)); - } - } - } -} - -impl Handler for LoaderActor { - type Result = (); - - fn handle(&mut self, msg: TryStop, ctx: &mut Context) -> Self::Result { - self.stop_behavior = msg.0; - ctx.stop(); - } -} - -impl Handler for LoaderActor { - type Result = (); - - fn handle(&mut self, msg: SetStoppedTx, _ctx: &mut Context) -> Self::Result { - self.stopped_tx = Some(msg.0); - } -} - -// pub fn stopped(addr: Addr) -> oneshot::Receiver { -// let (tx, rx) = oneshot::channel(); -// addr.do_send(SetStoppedTx(tx)); -// rx -// } diff --git a/meta-cli/src/deploy/actors/mod.rs b/meta-cli/src/deploy/actors/mod.rs index 5ede9e2414..de556fe75d 100644 --- a/meta-cli/src/deploy/actors/mod.rs +++ b/meta-cli/src/deploy/actors/mod.rs @@ -3,7 +3,6 @@ pub mod console; pub mod discovery; -pub mod loader; pub mod task; pub mod task_manager; pub mod watcher; diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 98a3c21a70..32c6c5c73a 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -23,7 +23,6 @@ pub mod serialize; use self::action::{ActionFinalizeContext, ActionResult, TaskAction}; use super::console::{Console, ConsoleActor}; use super::task_manager::{self, TaskManager}; -use crate::com::server::get_instance_port; use crate::config::Config; use crate::interlude::*; use color_eyre::owo_colors::OwoColorize; @@ -76,12 +75,6 @@ pub mod message { use message::*; -#[derive(Debug)] -pub struct TaskConfig { - base_dir: Arc, - instance_port: u16, -} - #[derive(Serialize, Deserialize)] #[serde(untagged)] enum TaskOutput { @@ -93,15 +86,6 @@ enum TaskOutput { const TIMEOUT_ENV_NAME: &str = "LOADER_TIMEOUT_SECS"; const DEFAULT_TIMEOUT: u64 = 120; -impl TaskConfig { - pub fn init(base_dir: Arc) -> Self { - Self { - base_dir, - instance_port: get_instance_port(), - } - } -} - #[derive(Debug)] pub enum TaskFinishStatus { Cancelled, @@ -119,7 +103,7 @@ pub struct TaskActor { console: Addr, collected_output: Vec>, timeout_duration: Duration, - followup_task: A::Followup, + followup_task_options: Option, } impl TaskActor @@ -153,7 +137,7 @@ where }) .unwrap_or(DEFAULT_TIMEOUT), ), - followup_task: Default::default(), + followup_task_options: None, } } @@ -177,6 +161,9 @@ impl Actor for TaskActor { let fut = async move { match action.get_command().await { Ok(cmd) => { + let std_cmd = cmd.as_std(); + debug!("std command: {std_cmd:?}"); + debug!("command: {cmd:?}"); addr.do_send(StartProcess(cmd)); } Err(e) => { @@ -290,7 +277,6 @@ impl Handler for TaskActor { let addr = ctx.address(); let console = self.console.clone(); - let path = self.get_path_owned(); let action = self.action.clone(); let fut = async move { @@ -334,14 +320,6 @@ enum OutputLevel { Error, } -#[derive(Deserialize, Debug)] -#[serde(tag = "method", content = "params")] -#[serde(rename_all = "camelCase")] -enum RpcCall { - QueryGlobalConfig, - QueryTypegraphConfig { typegraph: String }, -} - #[derive(Serialize, Deserialize, Debug)] enum JsonRpcVersion { #[serde(rename = "2.0")] @@ -353,7 +331,7 @@ struct RpcRequest { jsonrpc: JsonRpcVersion, id: u32, #[serde(flatten)] - call: RpcCall, + call: serde_json::Value, } impl RpcRequest { @@ -496,83 +474,69 @@ impl Handler for TaskActor { type Result = (); fn handle(&mut self, Rpc(req): Rpc, ctx: &mut Context) -> Self::Result { + let action = self.action.clone(); let addr = ctx.address(); - match &req.call { - RpcCall::QueryGlobalConfig => { - let config = 
self.action.get_global_config(); - let response = req.response(config); - self.send_rpc_response(response, ctx); - // addr.do_send(SendRpcResponse(response)); + + let rpc_call: A::RpcCall = match serde_json::from_value(req.call.clone()) { + Ok(rpc_call) => rpc_call, + Err(err) => { + self.console + .error(format!("invalid jsonrpc request {req:?}: {err:?}")); + addr.do_send(Exit(TaskFinishStatus::::Error)); + return; } - RpcCall::QueryTypegraphConfig { typegraph } => { - let config = self.action.get_typegraph_config(typegraph); - let response = req.response(config); - self.send_rpc_response(response, ctx); - // addr.do_send(SendRpcResponse(response)); + }; + + let console = self.console.clone(); + + let fut = async move { + let id = req.id; + match action.get_rpc_response(&rpc_call).await { + Ok(response) => { + addr.do_send(message::SendRpcResponse(req.response(response))); + } + Err(err) => { + console.error(format!("failed to handle jsonrpc call {req:?}: {err:?}")); + addr.do_send(Exit(TaskFinishStatus::::Error)); + } } - } + }; + + ctx.spawn(fut.in_current_span().into_actor(self)); } } -impl TaskActor { - fn send_rpc_response(&mut self, response: RpcResponse, ctx: &mut Context) { - match serde_json::to_string(&response) { - Ok(response) => { - let stdin = self.process_stdin.clone().unwrap(); - let fut = async move { - let mut stdin = stdin.lock().await; - stdin - .write_all(response.as_bytes()) - .await - .expect("could not write rpc response to process stdin"); - stdin - .write_all(b"\n") - .await - .expect("could not write newline to process stdin"); - }; +impl Handler for TaskActor { + type Result = (); - ctx.spawn(fut.in_current_span().into_actor(self)); - } - Err(e) => { - self.console - .error(format!("could not serialize rpc response {e}")); + fn handle( + &mut self, + SendRpcResponse(response): SendRpcResponse, + ctx: &mut Context, + ) -> Self::Result { + { + let response_id = response.id; + match serde_json::to_string(&response) { + Ok(response) => { + let stdin = self.process_stdin.clone().unwrap(); + let fut = async move { + let mut stdin = stdin.lock().await; + stdin + .write_all(response.as_bytes()) + .await + .expect("could not write rpc response to process stdin"); + stdin + .write_all(b"\n") + .await + .expect("could not write newline to process stdin"); + }; + ctx.spawn(fut.in_current_span().into_actor(self)); + } + Err(e) => { + self.console + .error(format!("could not serialize rpc response {e}")); + } } - } + }; } } - -// impl Handler for TaskActor { -// type Result = (); -// -// fn handle( -// &mut self, -// SendRpcResponse(response): SendRpcResponse, -// _ctx: &mut Context, -// ) -> Self::Result { -// { -// let response_id = response.id; -// match serde_json::to_string(&response) { -// Ok(response) => { -// let stdin = self.process_stdin.clone().unwrap_or_log(); -// let console = self.console.clone(); -// let fut = async move { -// let stdin = stdin.lock().await; -// console.debug(format!("sending rpc response #{response_id}")); -// stdin -// .write_all(response.as_bytes()) -// .await -// .expect("could not write rpc response to process stdin"); -// stdin -// .write_all(b"\n") -// .await -// .expect("could not write newline to process stdin"); -// }; -// } -// Err(e) => { -// self.console -// .error(format!("could not serialize rpc response {e}")); -// } -// } -// }; -// } -// } diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index 1597a243e8..8b307addae 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ 
b/meta-cli/src/deploy/actors/task/action.rs @@ -1,21 +1,33 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 +use super::deploy::MigrationAction; use super::TaskActor; use crate::deploy::actors::task_manager::{TaskManager, TaskRef}; use crate::interlude::*; use crate::{config::Config, deploy::actors::console::ConsoleActor}; -use std::{path::Path, sync::Arc}; +use std::sync::Arc; use tokio::process::Command; +#[derive(Debug, Clone)] +pub struct SharedActionConfig { + pub command: &'static str, + pub config_dir: Arc, + pub working_dir: Arc, + pub migrations_dir: Arc, + pub default_migration_action: MigrationAction, +} + pub trait TaskActionGenerator: Clone { type Action: TaskAction; fn generate( &self, task_ref: TaskRef, - followup: Option<::Followup>, + options: ::Options, ) -> Self::Action; + + fn get_shared_config(&self) -> Arc; } pub struct ActionFinalizeContext { @@ -29,26 +41,40 @@ pub trait OutputData: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Se fn get_typegraph_name(&self) -> String; } -pub trait FollowupTaskConfig { - fn schedule(&self, task_manager: Addr>); +#[derive(Default, Debug, Clone)] +pub enum TaskFilter { + #[default] + All, + Typegraphs(Vec), +} + +impl ToString for TaskFilter { + fn to_string(&self) -> String { + match self { + TaskFilter::All => "all".to_string(), + TaskFilter::Typegraphs(typegraphs) => format!("typegraphs={}", typegraphs.join(",")), + } + } } pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { type SuccessData: OutputData; type FailureData: OutputData; - type Followup: FollowupTaskConfig + Default + std::fmt::Debug + Unpin + Send; + type Options: Default + std::fmt::Debug + Unpin + Send; type Generator: TaskActionGenerator + Unpin; + type RpcCall: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send; async fn get_command(&self) -> Result; fn get_task_ref(&self) -> &TaskRef; + fn get_options(&self) -> &Self::Options; + fn get_start_message(&self) -> String; fn get_error_message(&self, err: &str) -> String; - fn get_global_config(&self) -> serde_json::Value; - fn get_typegraph_config(&self, typegraph: &str) -> serde_json::Value; - fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext); + + async fn get_rpc_response(&self, call: &Self::RpcCall) -> Result; } pub type ActionResult = Result; diff --git a/meta-cli/src/deploy/actors/task/command.rs b/meta-cli/src/deploy/actors/task/command.rs index eb6e643fdd..b75794dc5c 100644 --- a/meta-cli/src/deploy/actors/task/command.rs +++ b/meta-cli/src/deploy/actors/task/command.rs @@ -1,51 +1,108 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -use super::TaskConfig; +use crate::config::ModuleType; use crate::interlude::*; -use crate::{config::ModuleType, typegraph::loader::get_task_command}; use std::process::Stdio; use std::{path::Path, sync::Arc}; use tokio::process::Command; -pub(super) struct CommandBuilder { - pub path: PathBuf, - pub task_config: Arc, - pub action_env: &'static str, +use super::action::{SharedActionConfig, TaskFilter}; + +mod python; +mod typescript; + +pub(super) async fn build_task_command( + relative_path: impl AsRef, + shared_config: Arc, + task_filter: TaskFilter, +) -> Result { + let path = shared_config.working_dir.join(relative_path.as_ref()); + let ctx = CommandContext { + shared_config, + task_filter, + path, + }; + + ctx.ensure_file_exists().await?; + + let mut command = if let Some(command) = ctx.build_raw_from_env() { + command + } else { + match ModuleType::try_from(ctx.path.as_path())? { + ModuleType::Python => python::get_raw_command(&ctx.path).await?, + ModuleType::TypeScript | ModuleType::JavaScript => { + typescript::get_raw_command(&ctx.path).await? + } + } + }; + + ctx.setup_task(&mut command); + + Ok(command) } -impl CommandBuilder { - pub(super) async fn build(&self) -> Result { - if !tokio::fs::try_exists(&self.path) - .await - .map_err(|e| eyre::eyre!("typegraph file {:?} does not exist: {:#}", self.path, e))? - { +struct CommandContext { + shared_config: Arc, + task_filter: TaskFilter, + path: PathBuf, +} + +impl CommandContext { + async fn ensure_file_exists(&self) -> Result<()> { + if !tokio::fs::try_exists(&self.path).await.map_err(|e| { + eyre::eyre!( + "typegraph definition module {:?} does not exist: {:#}", + self.path, + e + ) + })? { return Err(eyre::eyre!( - "typegraph file {:?} does not exist", + "typegraph definition module {:?} does not exist", &self.path )); } + Ok(()) + } - let path: &Path = &self.path; - // TODO move into this file - let mut command = get_task_command( - ModuleType::try_from(path).unwrap_or_log(), + fn build_raw_from_env(&self) -> Option { + if let Ok(argv_str) = std::env::var("MCLI_LOADER_CMD") { + let argv = argv_str.split(' ').collect::>(); + let mut command = Command::new(argv[0]); + command.args(&argv[1..]).arg(self.path.to_str().unwrap()); + Some(command) + } else { + None + } + } + + fn setup_task(&self, command: &mut Command) { + let CommandContext { + shared_config, + task_filter, path, - &self.task_config.base_dir, - ) - .await - .map_err(|e| eyre::eyre!("failed to get task command: {:#}", e))?; + } = self; + command - .env("MCLI_TG_PATH", path.display().to_string()) + .current_dir(shared_config.working_dir.to_str().unwrap()) + .env("MCLI_VERSION", crate::build::PKG_VERSION) + .env("MCLI_TYPEGRAPH_PATH", path.display().to_string()) + .env("MCLI_COMMAND", shared_config.command) + .env("MCLI_FILTER", task_filter.to_string()) .env( - "MCLI_SERVER_PORT", - self.task_config.instance_port.to_string(), + "MCLI_CONFIG_DIR", + shared_config.config_dir.display().to_string(), + ) + .env( + "MCLI_WORKING_DIR", + shared_config.working_dir.display().to_string(), + ) + .env( + "MCLI_MIGRATIONS_DIR", + shared_config.migrations_dir.display().to_string(), ) - .env("MCLI_ACTION", self.action_env) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()); - - Ok(command) } } diff --git a/meta-cli/src/deploy/actors/task/command/python.rs b/meta-cli/src/deploy/actors/task/command/python.rs new file mode 100644 index 0000000000..13fc5c1b20 --- /dev/null +++ b/meta-cli/src/deploy/actors/task/command/python.rs @@ 
-0,0 +1,26 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use tokio::process::Command; + +use crate::{interlude::*, utils::ensure_venv}; + +pub(super) async fn get_raw_command(path: impl AsRef) -> Result { + ensure_venv(path.as_ref()).map_err(|e| { + eyre::eyre!( + "python venv (.venv) not found in parent directories of {:?}: {}", + path.as_ref(), + e + ) + })?; + let loader_py = std::env::var("MCLI_LOADER_PY").unwrap_or_else(|_| "python3".to_string()); + let mut loader_py = loader_py.split_whitespace(); + let mut command = Command::new(loader_py.next().unwrap()); + command + .args(loader_py) + .arg(path.as_ref().to_str().unwrap()) + .env("PYTHONUNBUFFERED", "1") + .env("PYTHONDONTWRITEBYTECODE", "1") + .env("PY_TG_COMPATIBILITY", "1"); + Ok(command) +} diff --git a/meta-cli/src/deploy/actors/task/command/typescript.rs b/meta-cli/src/deploy/actors/task/command/typescript.rs new file mode 100644 index 0000000000..a630250ff2 --- /dev/null +++ b/meta-cli/src/deploy/actors/task/command/typescript.rs @@ -0,0 +1,141 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use crate::interlude::*; +use tokio::process::Command; + +enum TsRuntime { + Deno, + Node, + Bun, +} + +// TODO no `x tsx` for .js or .mjs files +pub(super) async fn get_raw_command(path: impl AsRef) -> Result { + let path = path.as_ref(); + match detect_runtime(path).await? { + TsRuntime::Deno => { + log::debug!("loading typegraph using deno"); + let mut command = Command::new("deno"); + command + .arg("run") + // .arg("--unstable") + .arg("--allow-all") + .arg("--check") + .arg(path.to_str().unwrap()); + Ok(command) + } + TsRuntime::Node => { + log::debug!( + "loading typegraph using npm x tsx, make sure npm packages have been installed" + ); + let mut command = Command::new("npm"); + command + .arg("x") + .arg("--yes") + .arg("tsx") + .arg(path.to_str().unwrap()); + Ok(command) + } + TsRuntime::Bun => { + log::debug!( + "loading typegraph using bun x tsx, make sure npm packages have been installed" + ); + let mut command = Command::new("bun"); + command + .arg("x") + .arg("tsx") + .arg(path.to_str().unwrap()) + .current_dir(path.parent().unwrap()); + Ok(command) + } + } +} + +// TODO cache? +async fn detect_runtime(tg_path: &Path) -> Result { + use utils::*; + use TsRuntime::*; + + let mut maybe_parent_dir = tg_path.parent(); + // try to detect runtime in use by checking for package.json/deno.json + // files first + loop { + let Some(parent_dir) = maybe_parent_dir else { + break; + }; + log::trace!("testing for ts project manifest in {parent_dir:?}"); + if has_deno_json(parent_dir).await { + log::trace!("deno.json hit in {parent_dir:?}"); + return Ok(Deno); + } + if has_package_json(parent_dir).await { + log::trace!("package.json hit in {parent_dir:?}"); + if test_node_exec().await? { + return Ok(Node); + } + if test_bun_exec().await? { + return Ok(Bun); + } + } + maybe_parent_dir = parent_dir.parent(); + } + + // if no package manifest found, just use the first runtime found in the + // following order + if test_deno_exec().await? { + return Ok(Deno); + } + if test_node_exec().await? { + return Ok(Node); + } + if test_bun_exec().await? 
{ + return Ok(Bun); + } + Err(ferr!("unable to find deno, node or bun runtimes")) +} + +mod utils { + use super::*; + use cached::proc_macro::cached; + + #[cached(result = true)] + pub async fn test_deno_exec() -> Result { + Ok(Command::new("deno") + .arg("--version") + .output() + .await + .map(|out| out.status.success())?) + } + + #[cached(result = true)] + pub async fn test_node_exec() -> Result { + Ok(Command::new("node") + .arg("-v") + .output() + .await + .map(|out| out.status.success())?) + } + + #[cached(result = true)] + pub async fn test_bun_exec() -> Result { + Ok(Command::new("bun") + .arg("-v") + .output() + .await + .map(|out| out.status.success())?) + } + + #[inline] + pub async fn has_deno_json(dir: &Path) -> bool { + use tokio::fs::try_exists; + matches!(try_exists(dir.join("deno.json")).await, Ok(true)) + || matches!(try_exists(dir.join("deno.jsonc")).await, Ok(true)) + } + + #[inline] + pub async fn has_package_json(dir: &Path) -> bool { + use tokio::fs::try_exists; + matches!(try_exists(dir.join("package.json")).await, Ok(true)) + } +} diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index 7135c52042..c83c8b791a 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -4,13 +4,12 @@ mod migrations; use super::action::{ - ActionFinalizeContext, ActionResult, FollowupTaskConfig, OutputData, TaskAction, - TaskActionGenerator, + ActionFinalizeContext, ActionResult, OutputData, SharedActionConfig, TaskAction, + TaskActionGenerator, TaskFilter, }; -use super::command::CommandBuilder; -use super::TaskConfig; +use super::command::build_task_command; use crate::deploy::actors::console::Console; -use crate::deploy::actors::task_manager::{TaskManager, TaskRef}; +use crate::deploy::actors::task_manager::TaskRef; use crate::interlude::*; use crate::secrets::Secrets; use color_eyre::owo_colors::OwoColorize; @@ -37,62 +36,65 @@ pub struct PrismaRuntimeId { #[derive(Debug)] pub struct DeployActionInner { task_ref: TaskRef, - task_config: Arc, - node: Arc, + task_options: DeployOptions, + shared_config: Arc, + deploy_target: Arc, secrets: Arc, - migrations_dir: Arc, - migration_actions: HashMap, - default_migration_action: MigrationAction, } #[derive(Clone)] pub struct DeployActionGenerator { - pub task_config: Arc, - pub node: Arc, - pub secrets: Arc, - pub migrations_dir: Arc, - pub default_migration_action: MigrationAction, + node: Arc, + secrets: Arc, // TODO secrets_store + shared_config: Arc, +} + +impl DeployActionGenerator { + pub fn new( + node: Arc, + secrets: Arc, + config_dir: Arc, + working_dir: Arc, + migrations_dir: Arc, + create_migrations: bool, + destructive_migrations: bool, // TODO enum { Fail, Reset, Ask } + ) -> Self { + Self { + node, + secrets, + shared_config: SharedActionConfig { + command: "deploy", + config_dir, + working_dir, + migrations_dir, + default_migration_action: MigrationAction { + apply: true, + create: create_migrations, + reset: destructive_migrations, + }, + } + .into(), + } + } } impl TaskActionGenerator for DeployActionGenerator { type Action = DeployAction; - fn generate(&self, task_ref: TaskRef, followup: Option) -> Self::Action { - let (default_migration_action, migration_actions) = if let Some(followup) = followup { - ( - Default::default(), - followup - .migrations - .into_iter() - .map(|(runtime, action_override)| { - ( - runtime, - MigrationAction { - reset: matches!( - action_override, - MigrationActionOverride::ResetDatabase - ), - 
..Default::default() - }, - ) - }) - .collect(), - ) - } else { - (self.default_migration_action.clone(), HashMap::new()) - }; - + fn generate(&self, task_ref: TaskRef, task_options: DeployOptions) -> Self::Action { DeployActionInner { task_ref, - task_config: self.task_config.clone(), - node: self.node.clone(), + task_options, + shared_config: self.shared_config.clone(), + deploy_target: self.node.clone(), secrets: self.secrets.clone(), - migrations_dir: self.migrations_dir.clone(), - migration_actions, - default_migration_action, } .into() } + + fn get_shared_config(&self) -> Arc { + self.shared_config.clone() + } } #[derive(Deserialize, Debug)] @@ -138,36 +140,44 @@ impl OutputData for DeployError { } } +#[derive(Debug, Default)] +pub struct DeployOptions { + filter: TaskFilter, + migration_options: Vec<(PrismaRuntimeId, MigrationActionOverride)>, +} + #[derive(Clone, Debug)] pub enum MigrationActionOverride { ResetDatabase, } -#[derive(Debug, Default)] -pub struct FollowupDeployConfig { - pub migrations: Vec<(PrismaRuntimeId, MigrationActionOverride)>, +#[derive(Deserialize, Debug)] +#[serde(tag = "method", content = "params")] +pub enum RpcCall { + GetDeployTarget, + GetDeployData { typegraph: String }, } impl TaskAction for DeployAction { type SuccessData = DeploySuccess; type FailureData = DeployError; + type Options = DeployOptions; type Generator = DeployActionGenerator; - type Followup = FollowupDeployConfig; + type RpcCall = RpcCall; async fn get_command(&self) -> Result { - CommandBuilder { - path: self - .task_config - .base_dir - .to_path_buf() - .join(&self.task_ref.path), - task_config: self.task_config.clone(), - action_env: "deploy", - } - .build() + build_task_command( + self.task_ref.path.clone(), + self.shared_config.clone(), + self.task_options.filter.clone(), + ) .await } + fn get_options(&self) -> &Self::Options { + &self.task_options + } + fn get_start_message(&self) -> String { format!( "starting deployment process for {:?}", @@ -238,46 +248,54 @@ impl TaskAction for DeployAction { } } - fn get_global_config(&self) -> serde_json::Value { - serde_json::json!({ - "typegate": { - "endpoint": self.node.base_url, - "auth": self.node.auth, + fn get_task_ref(&self) -> &crate::deploy::actors::task_manager::TaskRef { + &self.task_ref + } + + async fn get_rpc_response(&self, call: &RpcCall) -> Result { + match call { + RpcCall::GetDeployTarget => Ok(serde_json::to_value(&self.deploy_target)?), + + RpcCall::GetDeployData { typegraph } => Ok(self.get_deploy_data(typegraph)), + } + } +} + +impl MigrationAction { + fn apply_override(mut self, action_override: &MigrationActionOverride) -> Self { + match action_override { + MigrationActionOverride::ResetDatabase => MigrationAction { + reset: true, + ..self }, - "prefix": self.node.prefix, - }) + } } +} - fn get_typegraph_config(&self, typegraph: &str) -> serde_json::Value { - let migration_actions = self - .migration_actions +impl DeployActionInner { + fn get_deploy_data(&self, typegraph: &str) -> serde_json::Value { + let default_action = &self.shared_config.default_migration_action; + let actions = self + .task_options + .migration_options .iter() - .filter_map(|(runtime, action)| { - if runtime.typegraph == typegraph { - Some((runtime.name.clone(), action.clone())) + .filter_map(|(rt, action_override)| { + if rt.typegraph == typegraph { + Some(( + rt.name.clone(), + default_action.clone().apply_override(action_override), + )) } else { None } }) .collect::>(); + // TODO hydrate secrets here + cache serde_json::json!({ 
"secrets": self.secrets.get(typegraph), - "artifactResolution": true, - "migrationActions": migration_actions, - "defaultMigrationAction": self.default_migration_action, - "migrationsDir": self.migrations_dir.to_path_buf().join(typegraph), + "defaultMigrationAction": default_action, + "migrationActions": actions }) } - - fn get_task_ref(&self) -> &crate::deploy::actors::task_manager::TaskRef { - &self.task_ref - } -} - -impl FollowupTaskConfig for FollowupDeployConfig { - fn schedule(&self, task_manager: Addr>) { - todo!(); - // task_manager.do_send(AddFollowupTask) - } } diff --git a/meta-cli/src/deploy/actors/task/deploy/migrations.rs b/meta-cli/src/deploy/actors/task/deploy/migrations.rs index ed90cc780d..3d17f2b645 100644 --- a/meta-cli/src/deploy/actors/task/deploy/migrations.rs +++ b/meta-cli/src/deploy/actors/task/deploy/migrations.rs @@ -3,10 +3,13 @@ use color_eyre::owo_colors::OwoColorize; -use super::{DeployAction, DeployActionInner, Migration, MigrationActionOverride, PrismaRuntimeId}; +use super::{ + DeployAction, DeployActionInner, DeployOptions, Migration, MigrationActionOverride, + PrismaRuntimeId, +}; use crate::deploy::actors::console::input::{Confirm, ConfirmHandler, Select}; use crate::deploy::actors::console::Console; -use crate::deploy::actors::task::action::ActionFinalizeContext; +use crate::deploy::actors::task::action::{ActionFinalizeContext, TaskFilter}; use crate::deploy::actors::task::TaskActor; use crate::interlude::*; @@ -243,7 +246,25 @@ impl Handler for TaskActor { type Result = (); fn handle(&mut self, msg: ResetDatabase, _: &mut Self::Context) { - self.followup_task.migrations.push(( + let options = if let Some(options) = self.followup_task_options.as_mut() { + options + } else { + self.followup_task_options = Some(DeployOptions { + filter: TaskFilter::Typegraphs(Default::default()), + migration_options: Default::default(), + }); + self.followup_task_options.as_mut().unwrap() + }; + { + let typegraphs = match options.filter { + TaskFilter::Typegraphs(ref mut typegraphs) => typegraphs, + _ => unreachable!(), + }; + if !typegraphs.contains(&msg.typegraph) { + typegraphs.push(msg.typegraph.clone()); + } + } + options.migration_options.push(( PrismaRuntimeId { typegraph: msg.typegraph.clone(), name: msg.runtime.clone(), diff --git a/meta-cli/src/deploy/actors/task/serialize.rs b/meta-cli/src/deploy/actors/task/serialize.rs index 8824bc8b52..bcaeda5876 100644 --- a/meta-cli/src/deploy/actors/task/serialize.rs +++ b/meta-cli/src/deploy/actors/task/serialize.rs @@ -2,14 +2,13 @@ // SPDX-License-Identifier: MPL-2.0 use super::action::{ - ActionFinalizeContext, ActionResult, FollowupTaskConfig, OutputData, TaskAction, - TaskActionGenerator, + ActionFinalizeContext, ActionResult, OutputData, SharedActionConfig, TaskAction, + TaskActionGenerator, TaskFilter, }; -use super::command::CommandBuilder; -use super::TaskConfig; -use crate::com::store::MigrationAction; +use super::command::build_task_command; +use super::deploy::MigrationAction; use crate::deploy::actors::console::Console; -use crate::deploy::actors::task_manager::{TaskManager, TaskRef}; +use crate::deploy::actors::task_manager::TaskRef; use crate::interlude::*; use color_eyre::owo_colors::OwoColorize; use common::typegraph::Typegraph; @@ -22,18 +21,30 @@ pub type SerializeAction = Arc; #[derive(Debug)] pub struct SerializeActionInner { task_ref: TaskRef, - task_config: Arc, + task_options: SerializeOptions, + shared_config: Arc, } #[derive(Clone)] pub struct SerializeActionGenerator { - task_config: Arc, + 
shared_config: Arc, } impl SerializeActionGenerator { - pub fn new(task_config: TaskConfig) -> Self { + pub fn new(config_dir: Arc, working_dir: Arc, migrations_dir: Arc) -> Self { Self { - task_config: Arc::new(task_config), + shared_config: SharedActionConfig { + command: "serialize", + config_dir, + working_dir, + migrations_dir, + default_migration_action: MigrationAction { + apply: true, + create: false, + reset: false, + }, + } + .into(), } } } @@ -41,13 +52,18 @@ impl SerializeActionGenerator { impl TaskActionGenerator for SerializeActionGenerator { type Action = SerializeAction; - fn generate(&self, task_ref: TaskRef, followup: Option<()>) -> Self::Action { + fn generate(&self, task_ref: TaskRef, task_options: SerializeOptions) -> Self::Action { SerializeActionInner { task_ref, - task_config: self.task_config.clone(), + task_options, + shared_config: self.shared_config.clone(), } .into() } + + fn get_shared_config(&self) -> Arc { + self.shared_config.clone() + } } #[derive(Deserialize, Debug)] @@ -62,36 +78,37 @@ impl OutputData for Box { } } +#[derive(Debug, Default)] +pub struct SerializeOptions { + filter: TaskFilter, +} + impl OutputData for SerializeError { fn get_typegraph_name(&self) -> String { self.typegraph.clone() } } -impl FollowupTaskConfig for () { - fn schedule(&self, _task_manager: Addr>>) {} -} - impl TaskAction for SerializeAction { type SuccessData = Box; type FailureData = SerializeError; + type Options = SerializeOptions; type Generator = SerializeActionGenerator; - type Followup = (); + type RpcCall = serde_json::Value; async fn get_command(&self) -> Result { - CommandBuilder { - path: self - .task_config - .base_dir - .to_path_buf() - .join(&self.task_ref.path), - task_config: self.task_config.clone(), - action_env: "serialize", - } - .build() + build_task_command( + self.task_ref.path.clone(), + self.shared_config.clone(), + self.task_options.filter.clone(), + ) .await } + fn get_options(&self) -> &Self::Options { + &self.task_options + } + fn get_start_message(&self) -> String { format!( "starting serialization process for {:?}", @@ -130,23 +147,11 @@ impl TaskAction for SerializeAction { } } - fn get_global_config(&self) -> serde_json::Value { - serde_json::json!({ - "typegate": None::, - "prefix": None::, - }) - } - fn get_typegraph_config(&self, typegraph: &str) -> serde_json::Value { - serde_json::json!({ - "secrets": {}, - "artifactResolution": true, // TODO?? - "migrationActions": {}, - "defaultMigrationAction": MigrationAction::default(), - "migrationsDir": ".", // TODO - }) - } - fn get_task_ref(&self) -> &crate::deploy::actors::task_manager::TaskRef { &self.task_ref } + + async fn get_rpc_response(&self, _call: &serde_json::Value) -> Result { + Err(ferr!("rpc request not supported on serialize task")) + } } diff --git a/meta-cli/src/deploy/actors/task_manager/retry_manager.rs b/meta-cli/src/deploy/actors/task_manager/retry_manager.rs new file mode 100644 index 0000000000..23d9e15ccf --- /dev/null +++ b/meta-cli/src/deploy/actors/task_manager/retry_manager.rs @@ -0,0 +1,2 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+// SPDX-License-Identifier: MPL-2.0 diff --git a/meta-cli/src/deploy/actors/watcher.rs b/meta-cli/src/deploy/actors/watcher.rs index 1fab8de179..abd1be4f42 100644 --- a/meta-cli/src/deploy/actors/watcher.rs +++ b/meta-cli/src/deploy/actors/watcher.rs @@ -11,7 +11,6 @@ use crate::interlude::*; use crate::typegraph::dependency_graph::DependencyGraph; use crate::typegraph::loader::discovery::FileFilter; use common::typegraph::Typegraph; -use grep::searcher::{BinaryDetection, SearcherBuilder}; use notify_debouncer_mini::notify::{RecommendedWatcher, RecursiveMode}; use notify_debouncer_mini::{new_debouncer, notify, DebounceEventResult, Debouncer}; use pathdiff::diff_paths; @@ -176,11 +175,7 @@ impl Handler for WatcherActor { }); } } else if path.try_exists().unwrap() { - let mut searcher = SearcherBuilder::new() - .binary_detection(BinaryDetection::none()) - .build(); - - if !self.file_filter.is_excluded(&path, &mut searcher) { + if !self.file_filter.is_excluded(&path) { let rel_path = diff_paths(&path, &self.directory).unwrap(); self.console.info(format!("File modified: {rel_path:?}")); diff --git a/meta-cli/src/deploy/push/migration_resolution.rs b/meta-cli/src/deploy/push/migration_resolution.rs index d2104dbdf6..dd67260fce 100644 --- a/meta-cli/src/deploy/push/migration_resolution.rs +++ b/meta-cli/src/deploy/push/migration_resolution.rs @@ -1,55 +1,44 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - -use actix::prelude::*; -use owo_colors::OwoColorize; - -use crate::{ - com::store::{MigrationAction, RuntimeMigrationAction, ServerStore}, - deploy::actors::{ - console::{ - input::{ConfirmHandler, OptionLabel, SelectOption}, - Console, ConsoleActor, - }, - loader::{LoadModule, LoaderActor}, - }, -}; + +// use crate::interlude::*; + +// use crate::com::store::{MigrationAction, RuntimeMigrationAction, ServerStore}; // DatabaseReset failure -#[derive(Debug)] -pub struct ConfirmDatabaseResetRequired { - pub typegraph_path: PathBuf, - pub loader: Addr, - pub runtime_name: String, -} - -impl ConfirmHandler for ConfirmDatabaseResetRequired { - fn on_confirm(&self) { - let tg_path = self.typegraph_path.clone(); - let runtime_name = self.runtime_name.clone(); - do_force_reset(&self.loader, tg_path, runtime_name); - } -} - -// NullConstraintViolation failure - -/// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph -fn do_force_reset(loader: &Addr, tg_path: PathBuf, runtime_name: String) { - // reset - let glob_cfg = ServerStore::get_migration_action_glob(); - ServerStore::set_migration_action( - tg_path.clone(), - RuntimeMigrationAction { - runtime_name, - action: MigrationAction { - reset: true, // ! 
- create: glob_cfg.create, - }, - }, - ); - - // reload - loader.do_send(LoadModule(tg_path.into())); -} +// #[derive(Debug)] +// pub struct ConfirmDatabaseResetRequired { +// pub typegraph_path: PathBuf, +// pub loader: Addr, +// pub runtime_name: String, +// } + +// impl ConfirmHandler for ConfirmDatabaseResetRequired { +// fn on_confirm(&self) { +// let tg_path = self.typegraph_path.clone(); +// let runtime_name = self.runtime_name.clone(); +// do_force_reset(&self.loader, tg_path, runtime_name); +// } +// } + +// // NullConstraintViolation failure + +// /// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph +// fn do_force_reset(loader: &Addr, tg_path: PathBuf, runtime_name: String) { +// // reset +// let glob_cfg = ServerStore::get_migration_action_glob(); +// ServerStore::set_migration_action( +// tg_path.clone(), +// RuntimeMigrationAction { +// runtime_name, +// action: MigrationAction { +// reset: true, // ! +// create: glob_cfg.create, +// }, +// }, +// ); + +// // reload +// loader.do_send(LoadModule(tg_path.into())); +// } diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index 64a4891f05..38f2c89b3e 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -7,10 +7,8 @@ use std::time::Duration; use serde::Deserialize; -use crate::com::{responses::SDKResponse, store::ServerStore}; -use crate::deploy::actors::console::input::{Confirm, Select}; -use crate::deploy::actors::console::{Console, ConsoleActor}; -use crate::deploy::actors::loader::LoaderActor; +use crate::com::responses::SDKResponse; +use crate::deploy::actors::console::ConsoleActor; use lazy_static::lazy_static; @@ -84,19 +82,6 @@ pub struct GenericPushFailure { message: String, } -#[derive(Debug)] -#[allow(unused)] -pub struct PushResult { - name: String, - messages: Vec, - migrations: Vec, - failure: Option, - original_name: String, - console: Addr, - loader: Addr, - sdk_response: SDKResponse, -} - lazy_static! 
{ static ref RETRY_COUNTERS: Mutex>> = Mutex::new(HashMap::new()); } diff --git a/meta-cli/src/typegraph/loader/discovery.rs b/meta-cli/src/typegraph/loader/discovery.rs index ec57f60864..9450999598 100644 --- a/meta-cli/src/typegraph/loader/discovery.rs +++ b/meta-cli/src/typegraph/loader/discovery.rs @@ -7,9 +7,6 @@ use crate::{ fs::is_hidden, }; use globset::GlobSet; -use grep::searcher::sinks::UTF8; -use grep::searcher::{BinaryDetection, SearcherBuilder}; -use grep::{regex::RegexMatcher, searcher::Searcher}; use ignore::{gitignore::Gitignore, Match, WalkBuilder}; use pathdiff::diff_paths; use std::collections::HashSet; @@ -32,10 +29,6 @@ impl Discovery { } pub async fn start(self, mut handler: impl FnMut(Result)) -> Result<()> { - let mut searcher = SearcherBuilder::new() - .binary_detection(BinaryDetection::none()) - .build(); - for result in WalkBuilder::new(self.dir.clone()) .standard_filters(true) .follow_links(true) @@ -44,7 +37,7 @@ impl Discovery { match result { Ok(entry) => { let path = entry.path(); - if !self.filter.is_excluded(path, &mut searcher) { + if !self.filter.is_excluded(path) { handler(Ok(path.to_path_buf())); } else { trace!("excluded from discovery {path:?}"); @@ -86,7 +79,8 @@ impl TryFrom<&Path> for ModuleType { fn try_from(path: &Path) -> std::result::Result { match path.extension() { - Some(ext) if ext == "ts" => Ok(ModuleType::Deno), + Some(ext) if ext == "ts" => Ok(ModuleType::TypeScript), + Some(ext) if ext == "js" || ext == "mjs" || ext == "cjs" => Ok(ModuleType::JavaScript), Some(ext) if ext == "py" => Ok(ModuleType::Python), _ => Err(ferr!( "unable to determine module type from path extension: {path:?}" @@ -101,7 +95,6 @@ pub struct GlobFilter { } struct SpecificFilters { - matcher: RegexMatcher, globs: GlobFilter, } @@ -109,35 +102,18 @@ pub struct FileFilter { base_dir: PathBuf, gitignore: Option, exclude_hidden: bool, - python: SpecificFilters, - deno: SpecificFilters, + python_filter: GlobFilter, + typescript_filter: GlobFilter, + javascript_filter: GlobFilter, } impl FileFilter { - fn python_filters(config: &TypegraphLoaderConfig) -> Result { - // soon - // let matcher = RegexMatcher::new_line_matcher("@typegraph\\(.*?\\)"); - let matcher = RegexMatcher::new_line_matcher("with\\s+[Tt]ype[Gg]raph|@typegraph\\(")?; - - Ok(SpecificFilters { - matcher, - globs: GlobFilter { - include_set: config.get_include_set()?, - exclude_set: config.get_exclude_set()?, - }, - }) - } - - fn deno_filters(config: &TypegraphLoaderConfig) -> Result { - Ok(SpecificFilters { - matcher: RegexMatcher::new_line_matcher("^(await\\s+)?typegraph\\(")?, - globs: GlobFilter { - include_set: config.get_include_set()?, - exclude_set: config.get_exclude_set()?, - }, + fn get_glob_filter(config: &TypegraphLoaderConfig) -> Result { + Ok(GlobFilter { + include_set: config.get_include_set()?, + exclude_set: config.get_exclude_set()?, }) } - pub fn new(config: &Config) -> Result { let ignore = config.base_dir.join(".gitignore"); let gitignore = if ignore.exists() { @@ -150,12 +126,13 @@ impl FileFilter { base_dir: config.base_dir.clone(), gitignore, exclude_hidden: true, - python: Self::python_filters(config.loader(ModuleType::Python))?, - deno: Self::deno_filters(config.loader(ModuleType::Deno))?, + python_filter: Self::get_glob_filter(config.loader(ModuleType::Python))?, + typescript_filter: Self::get_glob_filter(config.loader(ModuleType::TypeScript))?, + javascript_filter: Self::get_glob_filter(config.loader(ModuleType::JavaScript))?, }) } - pub fn is_excluded(&self, path: &Path, 
searcher: &mut Searcher) -> bool { + pub fn is_excluded(&self, path: &Path) -> bool { if path.is_dir() { return true; } @@ -176,73 +153,28 @@ impl FileFilter { } match ModuleType::try_from(path) { - Ok(ModuleType::Python) => self.is_python_module_excluded(path, &rel_path, searcher), - - Ok(ModuleType::Deno) => self.is_deno_module_excluded(path, &rel_path, searcher), + Ok(ModuleType::Python) => { + self.is_excluded_by_filter(path, &rel_path, &self.python_filter) + } + Ok(ModuleType::TypeScript) => { + self.is_excluded_by_filter(path, &rel_path, &self.typescript_filter) + } + Ok(ModuleType::JavaScript) => { + self.is_excluded_by_filter(path, &rel_path, &self.javascript_filter) + } Err(_) => true, } } - fn is_python_module_excluded( - &self, - path: &Path, - rel_path: &Path, - searcher: &mut Searcher, - ) -> bool { - let globs = &self.python.globs; - - if !globs.include_set.is_empty() && !globs.include_set.is_match(rel_path) { + fn is_excluded_by_filter(&self, path: &Path, rel_path: &Path, filter: &GlobFilter) -> bool { + if !filter.include_set.is_empty() && !filter.include_set.is_match(rel_path) { return true; } - if !globs.exclude_set.is_empty() && globs.exclude_set.is_match(rel_path) { + if !filter.exclude_set.is_empty() && filter.exclude_set.is_match(rel_path) { return true; } - let mut ret = true; - searcher - .search_path( - &self.python.matcher, - path, - UTF8(|_, _| { - ret = false; - Ok(true) - }), - ) - .unwrap(); - - ret - } - - fn is_deno_module_excluded( - &self, - path: &Path, - rel_path: &Path, - searcher: &mut Searcher, - ) -> bool { - let globs = &self.deno.globs; - - if !globs.include_set.is_empty() && !globs.include_set.is_match(rel_path) { - return true; - } - - if !globs.exclude_set.is_empty() && globs.exclude_set.is_match(rel_path) { - return true; - } - - let mut ret = true; - - searcher - .search_path( - &self.deno.matcher, - path, - UTF8(|_, _| { - ret = false; - Ok(true) - }), - ) - .unwrap(); - - ret + false } } diff --git a/meta-cli/src/typegraph/loader/mod.rs b/meta-cli/src/typegraph/loader/mod.rs index 9a8b2a7d4f..2dfb7afe36 100644 --- a/meta-cli/src/typegraph/loader/mod.rs +++ b/meta-cli/src/typegraph/loader/mod.rs @@ -1,514 +1,4 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; -use actix::Arbiter; -use eyre::{eyre, Error}; pub mod discovery; - -pub use discovery::Discovery; -use owo_colors::OwoColorize; -use tokio::{ - io::AsyncReadExt, - process::Command, - sync::{Semaphore, SemaphorePermit}, - time::{timeout, Duration}, -}; - -use crate::{ - com::{responses::SDKResponse, server::get_instance_port, store::ServerStore}, - config::ModuleType, - utils::ensure_venv, -}; - -#[derive(Debug, Clone)] -pub struct TypegraphInfos { - pub path: PathBuf, - pub base_path: PathBuf, -} - -impl TypegraphInfos { - pub fn get_responses_or_fail(&self) -> Result>> { - ServerStore::get_responses_or_fail(&self.path) - } - - pub fn get_key(&self) -> Result { - let path = self - .path - .to_str() - .ok_or_else(|| anyhow::anyhow!("typegraph path is not valid unicode"))?; - Ok(path.to_string()) - } -} - -pub type LoaderResult = Result; - -pub struct LoaderPool { - base_dir: PathBuf, - semaphore: Semaphore, -} - -pub struct Loader<'a> { - base_dir: PathBuf, - #[allow(dead_code)] - permit: SemaphorePermit<'a>, -} - -impl LoaderPool { - pub fn new(base_dir: PathBuf, max_parallel_loads: usize) -> Self { - Self { - base_dir, - semaphore: Semaphore::new(max_parallel_loads), - } - } - - pub async fn get_loader(&self) -> Result> { - Ok(Loader { - base_dir: self.base_dir.clone(), - permit: self.semaphore.acquire().await?, - }) - } -} - -impl<'a> Loader<'a> { - #[tracing::instrument(skip(self))] - pub async fn load_module(&self, path: Arc) -> LoaderResult { - match tokio::fs::try_exists(path.as_ref()).await { - Ok(exists) => { - if !exists { - return Err(LoaderError::ModuleFileNotFound { path }); - } - } - Err(err) => { - return Err(LoaderError::Unknown { - path, - error: ferr!("failed to check if file exists").error(err), - }); - } - } - let command = Self::get_load_command( - ModuleType::try_from(path.as_path()).unwrap_or_log(), - &path, - &self.base_dir, - ) - .await?; - debug!(?path, "loading module"); - self.load_command(command, &path).await - } - - #[tracing::instrument(skip(self))] - async fn load_command(&self, mut command: Command, path: &Path) -> LoaderResult { - let path: Arc = path.to_path_buf().into(); - - command - .env("META_CLI_TG_PATH", path.display().to_string()) - .env("META_CLI_SERVER_PORT", get_instance_port().to_string()) - .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::piped()); - - use process_wrap::tokio::*; - let mut child = TokioCommandWrap::from(command) - .wrap(KillOnDrop) - // we use sessions so that kill on drop - // signals will get all grand-children - .wrap(ProcessSession) - .spawn() - .map_err(|err| LoaderError::LoaderProcess { - path: path.clone(), - error: err.into(), - })?; - - // let stderr = child.stderr().ok_or_else(|| LoaderError::LoaderProcess { - // path: path.clone(), - // error: eyre!("could not get stderr from loader process"), - // })?; - // Arbiter::current().spawn(async move { - // // - // }); - - let duration = - get_loader_timeout_duration().map_err(|err| LoaderError::Other { error: err })?; - match timeout(duration, Box::into_pin(child.wait())).await { - Err(_) => { - Box::into_pin(child.kill()).await.unwrap_or_log(); - Err(LoaderError::LoaderTimeout { path: path.clone() }) - } - Ok(exit) => { - let exit = exit.map_err(|e| LoaderError::LoaderProcess { - path: path.clone(), - error: e.into(), // generic - })?; - if exit.success() { - Ok(TypegraphInfos { - path: path.as_ref().to_owned(), - base_path: self.base_dir.clone(), - }) - } else { - let stdout = match 
child.stdout().take().as_mut() { - Some(value) => { - let mut buff = String::new(); - value.read_to_string(&mut buff).await.map_err(|e| { - LoaderError::LoaderProcess { - path: path.clone(), - error: e.into(), - } - })?; - buff.to_owned() - } - None => "".to_string(), - }; - - use color_eyre::SectionExt; - Err(LoaderError::LoaderProcess { - path: path.clone(), - error: ferr!("loader process err") - .section(stdout.trim().to_string().header("Stdout:")) - .suppress_backtrace(true), - }) - } - } - } - } - - #[tracing::instrument(err)] - async fn get_load_command( - module_type: ModuleType, - path: &Path, - base_dir: &Path, - ) -> Result { - if let Ok(argv_str) = std::env::var("MCLI_LOADER_CMD") { - let argv = argv_str.split(' ').collect::>(); - let mut command = Command::new(argv[0]); - command - .args(&argv[1..]) - .arg(path.to_str().unwrap()) - .arg(base_dir); - return Ok(command); - } - - match module_type { - ModuleType::Python => { - ensure_venv(path).map_err(|e| LoaderError::PythonVenvNotFound { - path: path.to_owned().into(), - error: e, - })?; - let loader_py = - std::env::var("MCLI_LOADER_PY").unwrap_or_else(|_| "python3".to_string()); - let mut loader_py = loader_py.split_whitespace(); - let mut command = Command::new(loader_py.next().unwrap()); - command - .args(loader_py) - .arg(path.to_str().unwrap()) - .current_dir(base_dir) - .env("PYTHONUNBUFFERED", "1") - .env("PYTHONDONTWRITEBYTECODE", "1") - .env("PY_TG_COMPATIBILITY", "1"); - Ok(command) - } - ModuleType::Deno => { - // TODO cache result? - match detect_deno_loader_cmd(path) - .await - .map_err(|error| LoaderError::Unknown { - path: path.to_path_buf().into(), - error, - })? { - TsLoaderRt::Deno => { - log::debug!("loading typegraph using deno"); - let mut command = Command::new("deno"); - command - .arg("run") - // .arg("--unstable") - .arg("--allow-all") - .arg("--check") - .arg(path.to_str().unwrap()) - .current_dir(base_dir); - Ok(command) - } - TsLoaderRt::Node => { - log::debug!("loading typegraph using npm x tsx, make sure npm packages have been installed"); - let mut command = Command::new("npm"); - command - .arg("x") - .arg("--yes") - .arg("tsx") - .current_dir(path.parent().unwrap()) - .arg(path.to_str().unwrap()); - Ok(command) - } - TsLoaderRt::Bun => { - log::debug!("loading typegraph using bun x tsx, make sure npm packages have been installed"); - let mut command = Command::new("bun"); - command - .arg("x") - .arg("tsx") - .arg(path.to_str().unwrap()) - .current_dir(path.parent().unwrap()); - Ok(command) - } - } - } - } - } -} - -#[tracing::instrument(err)] -pub async fn get_task_command( - module_type: ModuleType, - path: &Path, - base_dir: &Path, -) -> Result { - if let Ok(argv_str) = std::env::var("MCLI_LOADER_CMD") { - let argv = argv_str.split(' ').collect::>(); - let mut command = Command::new(argv[0]); - command - .args(&argv[1..]) - .arg(path.to_str().unwrap()) - .arg(base_dir); - return Ok(command); - } - - match module_type { - ModuleType::Python => { - ensure_venv(path).map_err(|e| LoaderError::PythonVenvNotFound { - path: path.to_owned().into(), - error: e, - })?; - let loader_py = - std::env::var("MCLI_LOADER_PY").unwrap_or_else(|_| "python3".to_string()); - let mut loader_py = loader_py.split_whitespace(); - let mut command = Command::new(loader_py.next().unwrap()); - command - .args(loader_py) - .arg(path.to_str().unwrap()) - .current_dir(base_dir) - .env("PYTHONUNBUFFERED", "1") - .env("PYTHONDONTWRITEBYTECODE", "1") - .env("PY_TG_COMPATIBILITY", "1"); - Ok(command) - } - ModuleType::Deno => { 
- // TODO cache result? - match detect_deno_loader_cmd(path) - .await - .map_err(|error| LoaderError::Unknown { - path: path.to_path_buf().into(), - error, - })? { - TsLoaderRt::Deno => { - log::debug!("loading typegraph using deno"); - let mut command = Command::new("deno"); - command - .arg("run") - // .arg("--unstable") - .arg("--allow-all") - .arg("--check") - .arg(path.to_str().unwrap()) - .current_dir(base_dir); - Ok(command) - } - TsLoaderRt::Node => { - log::debug!("loading typegraph using npm x tsx, make sure npm packages have been installed"); - let mut command = Command::new("npm"); - command - .arg("x") - .arg("--yes") - .arg("tsx") - .current_dir(path.parent().unwrap()) - .arg(path.to_str().unwrap()); - Ok(command) - } - TsLoaderRt::Bun => { - log::debug!("loading typegraph using bun x tsx, make sure npm packages have been installed"); - let mut command = Command::new("bun"); - command - .arg("x") - .arg("tsx") - .arg(path.to_str().unwrap()) - .current_dir(path.parent().unwrap()); - Ok(command) - } - } - } - } -} - -enum TsLoaderRt { - Deno, - Node, - Bun, -} -async fn detect_deno_loader_cmd(tg_path: &Path) -> Result { - use TsLoaderRt::*; - let test_deno_exec = || async { - Command::new("deno") - .arg("--version") - .output() - .await - .map(|out| out.status.success()) - }; - let test_node_exec = || async { - Command::new("node") - .arg("-v") - .output() - .await - .map(|out| out.status.success()) - }; - let test_bun_exec = || async { - Command::new("deno") - .arg("--version") - .output() - .await - .map(|out| out.status.success()) - }; - let mut maybe_parent_dir = tg_path.parent(); - // try to detect runtime in use by checking for package.json/deno.json - // files first - loop { - let Some(parent_dir) = maybe_parent_dir else { - break; - }; - use tokio::fs::try_exists; - log::trace!("testing for ts project manifest in {parent_dir:?}"); - if matches!(try_exists(parent_dir.join("deno.json")).await, Ok(true)) - || matches!(try_exists(parent_dir.join("deno.jsonc")).await, Ok(true)) - { - log::trace!("deno.json hit in {parent_dir:?}"); - return Ok(Deno); - } - if matches!(try_exists(parent_dir.join("package.json")).await, Ok(true)) { - log::trace!("package.json hit in {parent_dir:?}"); - // TODO: cache the test values without making a spaghetti mess - // lazy async result values are hard to Once/LazyCell :/ - if test_node_exec().await? { - return Ok(Node); - } - if test_bun_exec().await? { - return Ok(Bun); - } - } - maybe_parent_dir = parent_dir.parent(); - } - // if no package manifest found, just use the first runtime found in the - // following order - if test_deno_exec().await? { - return Ok(Deno); - } - if test_node_exec().await? { - return Ok(Node); - } - if test_bun_exec().await? 
{ - return Ok(Bun); - } - Err(ferr!("unable to find deno, node or bun runtimes")) -} - -#[allow(unused)] -#[derive(Debug)] -pub enum LoaderError { - PostProcessingError { - path: Arc, - typegraph_name: String, - error: Error, - }, - SerdeJson { - path: Arc, - content: String, - error: serde_json::Error, - }, - LoaderProcess { - path: Arc, - error: Error, - }, - LoaderTimeout { - path: Arc, - }, - Other { - error: Error, - }, - ModuleFileNotFound { - path: Arc, - }, - Unknown { - path: Arc, - error: Error, - }, - PythonVenvNotFound { - path: Arc, - error: Error, - }, -} - -impl core::fmt::Display for LoaderError { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::PostProcessingError { - path, - typegraph_name, - error, - } => { - write!( - fmt, - "error while post processing typegraph {name} from {path:?}: {error:?}", - name = typegraph_name.blue() - ) - } - Self::SerdeJson { - path, - content, - error, - } => { - write!( - fmt, - "error while parsing raw typegraph JSON from {path:?}: {error:?} in {content:?}") - } - Self::LoaderProcess { path, error } => { - write!( - fmt, - "loader process error while loading typegraph(s) from {path:?}: {error:?}" - ) - } - Self::LoaderTimeout { path } => { - write!( - fmt, - "loader process timed out while loading typegraph(s) from {path:?}" - ) - } - Self::Other { error } => { - write!(fmt, "unknown error: {error:?}") - } - Self::Unknown { path, error } => { - write!( - fmt, - "unknown error while loading typegraph(s) from {path:?}: {error:?}" - ) - } - Self::ModuleFileNotFound { path } => { - write!(fmt, "module file not found: {path:?}") - } - Self::PythonVenvNotFound { path, error } => { - write!( - fmt, - "python venv (.venv) not found in parent directories of {path:?}: {error:?}", - ) - } - } - } -} - -fn get_loader_timeout_duration() -> Result { - let env_key = "LOADER_TIMEOUT"; - let secs = match std::env::var(env_key) { - Ok(value) => { - let value = value - .parse::() - .context(format!("{env_key} is not a positive integer"))?; - if value < 1 { - bail!("{env_key:?} cannot be less than 1"); - } - value - } - Err(_) => 120, - }; - Ok(Duration::from_secs(secs)) -} diff --git a/typegate/tests/e2e/cli/dev_test.ts b/typegate/tests/e2e/cli/dev_test.ts index 4e3f9f686d..e0262b5709 100644 --- a/typegate/tests/e2e/cli/dev_test.ts +++ b/typegate/tests/e2e/cli/dev_test.ts @@ -12,7 +12,6 @@ import { $ } from "dax"; const m = new TestModule(import.meta); const tgName = "migration-failure-test"; - /** * These tests use different ports for the virtual typegate instance to avoid * conflicts with one another when running in parallel. 
@@ -32,99 +31,102 @@ async function writeTypegraph(version: number | null, target = "migration.py") { } } -Meta.test({ - name: "meta dev: choose to reset the database", +Meta.test.only( + { + name: "meta dev: choose to reset the database", - gitRepo: { - content: { - "metatype.yml": "metatype.yml", + gitRepo: { + content: { + "metatype.yml": "metatype.yml", + }, }, }, -}, async (t) => { - const schema = randomSchema(); - const tgDefPath = join(t.workingDir, "migration.py"); - - await t.should("load first version of the typegraph", async () => { - await reset(tgName, schema); - await writeTypegraph(null, tgDefPath); - }); - - const metadev = await MetaDev.start({ - cwd: t.workingDir, - args: [ - "dev", - "--target=dev", - `--gate=http://localhost:${t.port}`, - "--secret", - `migration-failure-test:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, - ], - }); - - await metadev.fetchStderrLines((line) => { - console.log("line:", line); - return !$.stripAnsi(line).includes( - "Successfully pushed typegraph migration-failure-test", - ); - }); + async (t) => { + const schema = randomSchema(); + const tgDefPath = join(t.workingDir, "migration.py"); + + await t.should("load first version of the typegraph", async () => { + await reset(tgName, schema); + await writeTypegraph(null, tgDefPath); + }); + + const metadev = await MetaDev.start({ + cwd: t.workingDir, + args: [ + "dev", + "--target=dev", + `--gate=http://localhost:${t.port}`, + "--secret", + `migration-failure-test:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + ], + }); - await t.should("insert records", async () => { - const e = t.getTypegraphEngine(tgName); - if (!e) { - throw new Error("typegraph not found"); - } - await gql` - mutation { - createRecord(data: {}) { - id - } - } - ` - .expectData({ - createRecord: { - id: 1, - }, - }) - .on(e); - }); - - await t.should("load second version of the typegraph", async () => { - await writeTypegraph(1, tgDefPath); await metadev.fetchStderrLines((line) => { console.log("line:", line); - return !line.includes("[select]"); + return !$.stripAnsi(line).includes( + "successfully deployed typegraph migration-failure-test from migration.py", + ); }); - await metadev.writeLine("3"); - }); + await t.should("insert records", async () => { + const e = t.getTypegraphEngine(tgName); + if (!e) { + throw new Error("typegraph not found"); + } + await gql` + mutation { + createRecord(data: {}) { + id + } + } + ` + .expectData({ + createRecord: { + id: 1, + }, + }) + .on(e); + }); - await metadev.fetchStderrLines((line) => { - console.log("line:", line); - return !$.stripAnsi(line).includes( - "Successfully pushed typegraph migration-failure-test", - ); - }); + await t.should("load second version of the typegraph", async () => { + await writeTypegraph(1, tgDefPath); + await metadev.fetchStderrLines((line) => { + console.log("line:", line); + return !line.includes("[select]"); + }); - await t.should("database be empty", async () => { - const e = t.getTypegraphEngine(tgName); - if (!e) { - throw new Error("typegraph not found"); - } - await gql` - query { - findRecords { - id - age - } - } - ` - .expectData({ - findRecords: [], - }) - .on(e); - }); + await metadev.writeLine("3"); + }); - await metadev.close(); -}); + // await metadev.fetchStderrLines((line) => { + // console.log("line:", line); + // return !$.stripAnsi(line).includes( + // "Successfully pushed typegraph migration-failure-test", + // ); + // }); + + // await t.should("database be empty", 
async () => { + // const e = t.getTypegraphEngine(tgName); + // if (!e) { + // throw new Error("typegraph not found"); + // } + // await gql` + // query { + // findRecords { + // id + // age + // } + // } + // ` + // .expectData({ + // findRecords: [], + // }) + // .on(e); + // }); + + await metadev.close(); + }, +); async function listSubdirs(path: string): Promise { const subdirs: string[] = []; @@ -136,99 +138,100 @@ async function listSubdirs(path: string): Promise { return subdirs; } -Meta.test({ - name: "meta dev: remove latest migration", +Meta.test( + { + name: "meta dev: remove latest migration", - gitRepo: { - content: { - "metatype.yml": "metatype.yml", + gitRepo: { + content: { + "metatype.yml": "metatype.yml", + }, }, }, -}, async (t) => { - const schema = randomSchema(); - const tgDefFile = join(t.workingDir, "migration.py"); + async (t) => { + const schema = randomSchema(); + const tgDefFile = join(t.workingDir, "migration.py"); + + await t.should("have no migration file", async () => { + await assertRejects(() => + Deno.lstat(resolve(t.workingDir, "prisma-migrations")) + ); + }); - await t.should("have no migration file", async () => { - await assertRejects(() => - Deno.lstat(resolve(t.workingDir, "prisma-migrations")) - ); - }); - - await t.should("load first version of the typegraph", async () => { - await reset(tgName, schema); - await writeTypegraph(null, tgDefFile); - }); - - const metadev = await MetaDev.start({ - cwd: t.workingDir, - args: [ - "dev", - "--target=dev", - `--gate=http://localhost:${t.port}`, - `--secret=migration-failure-test:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, - ], - }); - - await metadev.fetchStderrLines((line) => { - console.log("line:", line); - return !$.stripAnsi(line).includes( - "Successfully pushed typegraph migration-failure-test", - ); - }); + await t.should("load first version of the typegraph", async () => { + await reset(tgName, schema); + await writeTypegraph(null, tgDefFile); + }); - await t.should("have created migration", async () => { - await Deno.lstat(resolve(t.workingDir, "prisma-migrations")); - }); + const metadev = await MetaDev.start({ + cwd: t.workingDir, + args: [ + "dev", + "--target=dev", + `--gate=http://localhost:${t.port}`, + `--secret=migration-failure-test:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + ], + }); - await t.should("insert records", async () => { - const e = t.getTypegraphEngine(tgName); - if (!e) { - throw new Error("typegraph not found"); - } - await gql` - mutation { - createRecord(data: {}) { - id - } - } - ` - .expectData({ - createRecord: { - id: 1, - }, - }) - .on(e); - }); - - const migrationsDir = resolve( - t.workingDir, - "prisma-migrations", - "migration-failure-test/main", - ); - console.log("Typegate migration dir", migrationsDir); - - await t.should("load second version of the typegraph", async () => { - await writeTypegraph(1, tgDefFile); await metadev.fetchStderrLines((line) => { console.log("line:", line); - return !line.includes("[select]"); + return !$.stripAnsi(line).includes( + "Successfully pushed typegraph migration-failure-test", + ); }); - assert((await listSubdirs(migrationsDir)).length === 2); + await t.should("have created migration", async () => { + await Deno.lstat(resolve(t.workingDir, "prisma-migrations")); + }); - await metadev.writeLine("1"); - }); + await t.should("insert records", async () => { + const e = t.getTypegraphEngine(tgName); + if (!e) { + throw new Error("typegraph not found"); + } + 
await gql` + mutation { + createRecord(data: {}) { + id + } + } + ` + .expectData({ + createRecord: { + id: 1, + }, + }) + .on(e); + }); - await metadev.fetchStderrLines((line) => { - console.log("line:", line); - return !line.includes( - "Removed migration directory", + const migrationsDir = resolve( + t.workingDir, + "prisma-migrations", + "migration-failure-test/main", ); - }); + console.log("Typegate migration dir", migrationsDir); - await t.should("have removed latest migration", async () => { - assert((await listSubdirs(migrationsDir)).length === 1); - }); + await t.should("load second version of the typegraph", async () => { + await writeTypegraph(1, tgDefFile); + await metadev.fetchStderrLines((line) => { + console.log("line:", line); + return !line.includes("[select]"); + }); - await metadev.close(); -}); + assert((await listSubdirs(migrationsDir)).length === 2); + + await metadev.writeLine("1"); + }); + + await metadev.fetchStderrLines((line) => { + console.log("line:", line); + return !line.includes("Removed migration directory"); + }); + + await t.should("have removed latest migration", async () => { + assert((await listSubdirs(migrationsDir)).length === 1); + }); + + await metadev.close(); + }, +); diff --git a/typegraph/core/src/utils/metagen_utils.rs b/typegraph/core/src/utils/metagen_utils.rs index 47d31cd8fb..06e0b12591 100644 --- a/typegraph/core/src/utils/metagen_utils.rs +++ b/typegraph/core/src/utils/metagen_utils.rs @@ -17,7 +17,7 @@ impl InputResolverSync for RawTgResolver { match order { GeneratorInputOrder::TypegraphFromTypegate { .. } => { Ok(GeneratorInputResolved::TypegraphFromTypegate { - raw: self.tg.clone(), + raw: self.tg.clone().into(), }) } GeneratorInputOrder::TypegraphFromPath { .. } => unimplemented!(), diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index 58fb970e31..8cccd8e330 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -54,7 +54,8 @@ export class ArtifactUploader { // const uploadUrls: Array = await response.json(); if (uploadUrls.length !== artifactMetas.length) { - const diff = `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; + const diff = + `array length mismatch: ${uploadUrls.length} !== ${artifactMetas.length}`; throw new Error(`Failed to get upload URLs for all artifacts: ${diff}`); } diff --git a/typegraph/python/typegraph/deploy/request.py b/typegraph/python/typegraph/deploy/request.py new file mode 100644 index 0000000000..819cfc44d5 --- /dev/null +++ b/typegraph/python/typegraph/deploy/request.py @@ -0,0 +1,32 @@ +# Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+# SPDX-License-Identifier: MPL-2.0 + +from enum import Enum +from typing import Optional, Any + +from typegraph.python.typegraph.graph.shared_types import BasicAuth +from typegraph.python.typegraph.io import DeployTarget + + +class Encoding(Enum): + JSON = "application/json" + BINARY = "application/octet-stream" + + +class Typegate: + base_url: str + auth: Optional[BasicAuth] + + def __init__(self, target: DeployTarget): + self.base_url = target.base_url + self.auth = target.auth + + def exec_request( + self, + path: str, + *, + method: str, + body: Optional[Any], + encoding: Encoding = Encoding.JSON, + ): + pass diff --git a/typegraph/python/typegraph/envs/cli.py b/typegraph/python/typegraph/envs/cli.py new file mode 100644 index 0000000000..a09a126128 --- /dev/null +++ b/typegraph/python/typegraph/envs/cli.py @@ -0,0 +1,89 @@ +# Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +# SPDX-License-Identifier: MPL-2.0 + +from dataclasses import dataclass +from typing import Optional, List +from os import environ +from enum import Enum + +from typegraph.io import Log + +_required_cli_envs = ( + "version", + "command", + "typegraph_path", + "filter", + "config_dir", + "working_dir", + "migrations_dir", +) + +_optional_cli_envs = ("prefix",) + + +class Command(Enum): + SERIALIZE = "serialize" + DEPLOY = "deploy" + + +@dataclass +class CliEnv: + version: str + command: Command + typegraph_path: str + filter: Optional[List[str]] + prefix: Optional[str] + config_dir: str + working_dir: str + migrations_dir: str + + @classmethod + def load(cls) -> Optional["CliEnv"]: + d = {} + missing = [] + + for key in _required_cli_envs: + env_name = "MCLI_" + key.upper() + value = environ.get(env_name) + if value is None: + missing.append(env_name) + else: + d[key] = value + + if len(missing) != 0: + if len(d) != 0: + raise Exception(f"required environment variables: {', '.join(missing)}") + else: + return None + + for key in _optional_cli_envs: + env_name = "MCLI_" + key.upper() + d[key] = environ.get(env_name) + + try: + d["command"] = Command(d["command"]) + except ValueError as e: + variants = ", ".join(v.value for v in Command) + raise Exception(f"MCLI_COMMAND env value should be one of: {variants}; {e}") + + raw_filter: str = d["filter"] + if raw_filter == "all": + filter = None + else: + if not raw_filter.startswith("typegraph="): + raise Exception(f"invalid MCLI_FILTER env value: {raw_filter}") + filter = raw_filter.removeprefix("typegraph=").split(",") + d["filter"] = filter + + Log.debug(d) + + return cls(**d) + + +CLI_ENV = CliEnv.load() + + +def get_cli_env(): + if CLI_ENV is None: + raise Exception("cannot be called on this context") + return CLI_ENV diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index 15e8cb4849..61b4f348d5 100644 --- a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -1,125 +1,106 @@ # Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
# SPDX-License-Identifier: MPL-2.0 -import os import traceback -from enum import Enum -from typing import Union, Optional from typegraph.gen.exports.core import ( FinalizeParams, + MigrationAction, PrismaMigrationConfig, ) from typegraph.graph.shared_types import TypegraphOutput -from typegraph.graph.tg_deploy import TypegateConnectionOptions, TypegraphDeployParams, tg_deploy -from typegraph.utils import freeze_tg_output -from typegraph.io import Log, GlobalConfig, Rpc, TypegraphConfig - -PORT = "MCLI_SERVER_PORT" # meta-cli instance that executes the current file -SELF_PATH = ( - "MCLI_TG_PATH" # path to the current file to uniquely identify the run results +from typegraph.graph.tg_deploy import ( + TypegateConnectionOptions, + TypegraphDeployParams, + tg_deploy, ) +from typegraph.utils import freeze_tg_output +from typegraph.io import Log, Rpc +from typegraph.envs.cli import CliEnv, Command, get_cli_env -class Command(Enum): - SERIALIZE = "serialize" - DEPLOY = "deploy" - -_env_command = os.environ.get("MCLI_ACTION") -command = None -if _env_command is not None: - if _env_command not in [Command.SERIALIZE.value, Command.DEPLOY.value]: - raise Exception(f"MCLI_ACTION env variable must be one of {Command.SERIALIZE.value}, {Command.DEPLOY.value}") - command = Command(_env_command) - - -_global_config: Optional[GlobalConfig] = None -def get_global_config(): - global _global_config - if _global_config is None: - _global_config = Rpc.get_global_config() - return _global_config class Manager: typegraph: TypegraphOutput - typegraph_path: str - typegraph_config: TypegraphConfig - global_config: GlobalConfig - - def is_run_from_cli() -> bool: - return os.environ.get("MCLI_ACTION") is not None + env: CliEnv - def __init__(self, typegraph: TypegraphOutput, port: Union[None, int] = None): + def __init__(self, typegraph: TypegraphOutput): self.typegraph = typegraph - tg_path = os.environ.get(SELF_PATH) - if tg_path is None: - raise Exception(f"{SELF_PATH} env variable not set") - self.typegraph_path = tg_path - self.global_config = get_global_config() - self.typegraph_config = Rpc.get_typegraph_config(typegraph.name) + self.env = get_cli_env() def run(self): + if self.env.command == Command.SERIALIZE: + self.serialize() + elif self.env.command == Command.DEPLOY: + self.deploy() + else: + raise Exception("unreachable") + + def serialize(self): + env = self.env params = FinalizeParams( - typegraph_path=self.typegraph_path, - prefix=self.global_config.prefix, + typegraph_path=env.typegraph_path, + prefix=env.prefix, artifact_resolution=True, codegen=False, prisma_migration=PrismaMigrationConfig( - migrations_dir=self.typegraph_config.migrations_dir, - migration_actions=[(k, v) for k, v in self.typegraph_config.migration_actions.items()], - default_migration_action=self.typegraph_config.default_migration_action - ) + migrations_dir=env.migrations_dir, + migration_actions=[], + default_migration_action=MigrationAction( + apply=True, + create=False, + reset=False, + ), + ), ) - if command is None: - raise Exception("MCLI_ACTION env variable required") - elif command == Command.SERIALIZE: - self.serialize(params) - elif command == Command.DEPLOY: - self.deploy(params) - else: - raise Exception(f"command {command.value} not supported") - - def serialize(self, config: FinalizeParams): try: - res = self.typegraph.serialize(config) + res = self.typegraph.serialize(params) Log.success(res.tgJson, noencode=True) except Exception as err: Log.debug(traceback.format_exc()) Log.failure({"typegraph": 
self.typegraph.name, "error": str(err)}) - def deploy(self, config: FinalizeParams): - typegate = self.global_config.typegate - if typegate is None: - raise Exception("unexpected") - if typegate.auth is None: - raise Exception( - f'{self.typegraph.name}" received null or undefined "auth" field on the configuration' - ) + def deploy(self): + env = self.env + deploy_data = Rpc.get_deploy_data(self.typegraph.name) + + params = FinalizeParams( + typegraph_path=env.typegraph_path, + prefix=env.prefix, + artifact_resolution=True, + codegen=False, + prisma_migration=PrismaMigrationConfig( + migrations_dir=env.migrations_dir, + migration_actions=list(deploy_data.migration_actions.items()), + default_migration_action=deploy_data.default_migration_action, + ), + ) # hack for allowing tg.serialize(config) to be called more than once - frozen_out = freeze_tg_output(config, self.typegraph) + frozen_out = freeze_tg_output(params, self.typegraph) try: - frozen_serialized = frozen_out.serialize(config) # noqa + frozen_serialized = frozen_out.serialize(params) # noqa except Exception as err: Log.debug(traceback.format_exc()) Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) return - if config.codegen: + if params.codegen: raise Exception("not implemented") try: + deploy_target = Rpc.get_deploy_target() params = TypegraphDeployParams( typegate=TypegateConnectionOptions( - url=typegate.endpoint, - auth=typegate.auth, + url=deploy_target.base_url, + auth=deploy_target.auth, ), - typegraph_path = self.typegraph_path, - prefix=config.prefix, - secrets=self.typegraph_config.secrets, - migrations_dir=self.typegraph_config.migrations_dir, - migration_actions = self.typegraph_config.migration_actions, - default_migration_action=self.typegraph_config.default_migration_action, + typegraph_path=env.typegraph_path, + prefix=env.prefix, + secrets=deploy_data.secrets, + migrations_dir=env.migrations_dir, + migration_actions=deploy_data.migration_actions, + default_migration_action=deploy_data.default_migration_action, ) ret = tg_deploy(frozen_out, params) response = ret.response @@ -128,7 +109,7 @@ def deploy(self, config: FinalizeParams): if not isinstance(response, dict): raise Exception("unexpected") - Log.success({ "typegraph": self.typegraph.name, **response }) + Log.success({"typegraph": self.typegraph.name, **response}) except Exception as err: Log.debug(traceback.format_exc()) Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) diff --git a/typegraph/python/typegraph/graph/typegraph.py b/typegraph/python/typegraph/graph/typegraph.py index 79098f5af8..be2c0f312d 100644 --- a/typegraph/python/typegraph/graph/typegraph.py +++ b/typegraph/python/typegraph/graph/typegraph.py @@ -15,12 +15,13 @@ Cors as CoreCors, ) from typegraph.gen.exports.utils import Auth - from typegraph.gen.types import Err from typegraph.graph.params import Cors, RawAuth from typegraph.graph.shared_types import FinalizationResult, TypegraphOutput from typegraph.policy import Policy, PolicyPerEffect, PolicySpec, get_policy_chain +from typegraph.envs.cli import CLI_ENV from typegraph.wit import core, store, wit_utils +from typegraph.io import Log if TYPE_CHECKING: from typegraph import t @@ -177,7 +178,7 @@ def typegraph( rate: Optional[Rate] = None, cors: Optional[Cors] = None, prefix: Optional[str] = None, -) -> Callable[[Callable[[Graph], None]], Callable[[], TypegraphOutput]]: +) -> Callable[[Callable[[Graph], None]], TypegraphOutput]: def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: if name 
is None: import re @@ -187,6 +188,16 @@ def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: else: actual_name = name + if CLI_ENV is not None: + filter = CLI_ENV.filter + if filter is not None and actual_name not in filter: + Log.debug("typegraph '{actual_name}' skipped") + + def serialize(params: FinalizeParams): + raise Exception("typegraph was filtered out") + + return TypegraphOutput(name=actual_name, serialize=serialize) + tg = Typegraph( name=actual_name, dynamic=dynamic, @@ -238,11 +249,12 @@ def serialize_with_artifacts( from typegraph.graph.tg_manage import Manager - if Manager.is_run_from_cli(): + # run from meta/cli + if CLI_ENV is not None: manager = Manager(tg_output) manager.run() - return lambda: tg_output + return tg_output return decorator diff --git a/typegraph/python/typegraph/io.py b/typegraph/python/typegraph/io.py index b63848f043..4f7dc75f8e 100644 --- a/typegraph/python/typegraph/io.py +++ b/typegraph/python/typegraph/io.py @@ -1,3 +1,6 @@ +# Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +# SPDX-License-Identifier: MPL-2.0 + from typing import Any, Optional, Dict from fileinput import FileInput from dataclasses import dataclass @@ -10,7 +13,6 @@ class Log: - @staticmethod def __format(*largs: Any): return " ".join(map(str, largs)) @@ -47,7 +49,7 @@ class _RpcResponseReader: input: FileInput def __init__(self): - self.input = FileInput('-') + self.input = FileInput("-") def read(self, rpc_id: int): while True: @@ -63,11 +65,14 @@ def read(self, rpc_id: int): continue if parsed.get("id") != rpc_id: - Log.error(f"rpc response: expected sequential requestests, unexpected rpc id {parsed.get('id')}") + Log.error( + f"rpc response: expected sequential requestests, unexpected rpc id {parsed.get('id')}" + ) continue return parsed.get("result") + class _RpcCall: response_reader = _RpcResponseReader() latest_rpc_id = 0 @@ -76,26 +81,30 @@ class _RpcCall: def call(cls, method: str, params: Any): cls.latest_rpc_id = cls.latest_rpc_id + 1 rpc_id = cls.latest_rpc_id - rpc_message = json.dumps({ - "jsonrpc": _JSON_RPC_VERSION, - "id": rpc_id, - "method": method, - "params": params - }) + rpc_message = json.dumps( + { + "jsonrpc": _JSON_RPC_VERSION, + "id": rpc_id, + "method": method, + "params": params, + } + ) print(f"jsonrpc: {rpc_message}") return cls.response_reader.read(rpc_id) @dataclass -class TypegateConfig: - endpoint: str +class DeployTarget: + base_url: str auth: BasicAuth + @dataclass -class GlobalConfig: - typegate: Optional[TypegateConfig] - prefix: Optional[str] +class DeployData: + secrets: Dict[str, str] + default_migration_action: MigrationAction + migration_actions: Dict[str, MigrationAction] def migration_action_from_dict(raw: Dict[str, bool]) -> MigrationAction: @@ -114,39 +123,41 @@ class TypegraphConfig: default_migration_action: MigrationAction migrations_dir: str + class Rpc: - @staticmethod - def get_global_config() -> GlobalConfig: - # TODO validation?? - res = _RpcCall.call("queryGlobalConfig", None) - raw_typegate = res.get("typegate") - typegate = None - if raw_typegate is not None: - raw_auth = raw_typegate.get("auth") - typegate = TypegateConfig( - endpoint=raw_typegate.get("endpoint"), - auth=BasicAuth( - username=raw_auth.get("username"), - password=raw_auth.get("password") - ) + _deploy_target: Optional[DeployTarget] = None + + # cached + @classmethod + def get_deploy_target(cls) -> DeployTarget: + if cls._deploy_target is None: + # TODO validation?? 
+ res = _RpcCall.call("GetDeployTarget", None) + + raw_auth = res.get("auth") + if raw_auth is None: + raise Exception(f"invalid data from rpc call: {res}") + + auth = BasicAuth(raw_auth.get("username"), raw_auth.get("password")) + + cls._deploy_target = DeployTarget( + base_url=res["baseUrl"], + auth=auth, ) - return GlobalConfig( - typegate=typegate, - prefix=res.get("prefix") - ) + + return cls._deploy_target @staticmethod - def get_typegraph_config(typegraph: str): - res = _RpcCall.call("queryTypegraphConfig", { - "typegraph": typegraph - }) - - migration_actions = { k: migration_action_from_dict(v) for k, v in res.get("migrationActions").items() } - - return TypegraphConfig( - secrets=res.get("secrets"), - artifact_resolution=res.get("artifactResolution"), - migration_actions=migration_actions, - default_migration_action=migration_action_from_dict(res.get("defaultMigrationAction")), - migrations_dir=res.get("migrationsDir") + def get_deploy_data(typegraph: str) -> DeployData: + res = _RpcCall.call("GetDeployData", {"typegraph": typegraph}) + + return DeployData( + secrets=res["secrets"], + default_migration_action=migration_action_from_dict( + res["defaultMigrationAction"] + ), + migration_actions={ + k: migration_action_from_dict(v) + for k, v in res["migrationActions"].items() + }, ) From 99ef7d3ad3490eadea5ef096a79788cd04d72ef7 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 10:02:47 +0300 Subject: [PATCH 14/35] task i/o actor, new cli envs, followup process --- meta-cli/src/cli/serialize.rs | 12 +- meta-cli/src/deploy/actors/console/input.rs | 27 +- meta-cli/src/deploy/actors/mod.rs | 1 + meta-cli/src/deploy/actors/task.rs | 348 ++++---------- meta-cli/src/deploy/actors/task/action.rs | 28 +- meta-cli/src/deploy/actors/task/deploy.rs | 54 ++- .../deploy/actors/task/deploy/migrations.rs | 433 +++++------------- meta-cli/src/deploy/actors/task/serialize.rs | 18 +- meta-cli/src/deploy/actors/task_io.rs | 323 +++++++++++++ meta-cli/src/deploy/actors/task_manager.rs | 3 +- .../src/deploy/actors/task_manager/report.rs | 12 +- typegraph/node/sdk/src/envs/cli.ts | 100 ++++ typegraph/node/sdk/src/io.ts | 31 +- typegraph/node/sdk/src/tg_manage.ts | 175 +++---- typegraph/node/sdk/src/typegraph.ts | 6 +- typegraph/python/typegraph/envs/cli.py | 6 +- typegraph/python/typegraph/io.py | 15 +- 17 files changed, 835 insertions(+), 757 deletions(-) create mode 100644 meta-cli/src/deploy/actors/task_io.rs create mode 100644 typegraph/node/sdk/src/envs/cli.ts diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index 140edc5e4a..3fe089937a 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -8,7 +8,7 @@ use crate::deploy::actors::task::serialize::{ SerializeAction, SerializeActionGenerator, SerializeError, }; use crate::deploy::actors::task::TaskFinishStatus; -use crate::deploy::actors::task_manager::{Report, TaskManagerInit, TaskSource}; +use crate::deploy::actors::task_manager::{Report, StopReason, TaskManagerInit, TaskSource}; use crate::interlude::*; use actix_web::dev::ServerHandle; use clap::Parser; @@ -92,6 +92,15 @@ impl Action for Serialize { let report = init.run().await; + match report.stop_reason { + StopReason::Error => bail!("failed"), + StopReason::Manual | StopReason::ManualForced => { + bail!("cancelled") + } + StopReason::Natural => {} + StopReason::Restart => panic!("restart not supported for serialize"), + } + // TODO no need to report errors let tgs = report.into_typegraphs(); @@ -133,6 +142,7 @@ impl 
SerializeReportExt for Report { .map(|entry| match entry.status { TaskFinishStatus::Finished(results) => results .into_iter() + .map(|(_, v)| v) .collect::, SerializeError>>() .unwrap_or_else(|e| { tracing::error!( diff --git a/meta-cli/src/deploy/actors/console/input.rs b/meta-cli/src/deploy/actors/console/input.rs index 7a3b0a4aab..5e478caff0 100644 --- a/meta-cli/src/deploy/actors/console/input.rs +++ b/meta-cli/src/deploy/actors/console/input.rs @@ -9,11 +9,6 @@ use owo_colors::OwoColorize; use crate::deploy::actors::console::{Console, ConsoleActor}; -pub trait ConfirmHandler: std::fmt::Debug { - fn on_confirm(&self); - fn on_deny(&self) {} -} - pub struct OptionLabel<'a> { primary: Cow<'a, str>, secondary: Option>, @@ -33,8 +28,8 @@ impl<'a> OptionLabel<'a> { } } -pub trait SelectOption: std::fmt::Debug { - fn on_select(&self); +pub trait SelectOption: std::fmt::Debug { + fn get_value(&self) -> Value; fn label(&self) -> OptionLabel<'_>; } @@ -52,16 +47,15 @@ impl Select { max_retry_count: 0, } } - pub fn max_retry_count(mut self, max_retry_count: usize) -> Self { self.max_retry_count = max_retry_count; self } - pub async fn interact( + pub async fn interact( self, - options: &[Box], - ) -> Result { + options: &[Box + Sync + Send + 'static>], + ) -> Result<(usize, V)> { let mut retry_left = self.max_retry_count; self.console @@ -82,8 +76,8 @@ impl Select { match input.trim().parse::() { Ok(i) if i > 0 && i <= options.len() => { - options[i - 1].on_select(); - return Ok(i - 1); + let value = options[i - 1].get_value(); + return Ok((i - 1, value)); } _ => { log::error!("invalid option, please try again"); @@ -118,10 +112,7 @@ impl Confirm { self } - pub async fn interact( - self, - handler: Box, - ) -> Result { + pub async fn interact(self) -> Result { let mut retry_left = self.max_retry_count as isize; loop { @@ -132,11 +123,9 @@ impl Confirm { match input.trim().to_lowercase().as_str() { "y" | "yes" => { - handler.on_confirm(); return Ok(true); } "n" | "no" => { - handler.on_deny(); return Ok(false); } _ => { diff --git a/meta-cli/src/deploy/actors/mod.rs b/meta-cli/src/deploy/actors/mod.rs index de556fe75d..287331b3ac 100644 --- a/meta-cli/src/deploy/actors/mod.rs +++ b/meta-cli/src/deploy/actors/mod.rs @@ -4,5 +4,6 @@ pub mod console; pub mod discovery; pub mod task; +mod task_io; pub mod task_manager; pub mod watcher; diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 32c6c5c73a..1429dc42ad 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -24,15 +24,15 @@ use self::action::{ActionFinalizeContext, ActionResult, TaskAction}; use super::console::{Console, ConsoleActor}; use super::task_manager::{self, TaskManager}; use crate::config::Config; +use crate::deploy::actors::task_io::TaskIoActor; use crate::interlude::*; -use color_eyre::owo_colors::OwoColorize; +use action::{get_typegraph_name, TaskActionGenerator}; use common::typegraph::Typegraph; -use futures::lock::Mutex; +use indexmap::IndexMap; use process_wrap::tokio::TokioChildWrapper; use serde::Deserialize; use std::time::Duration; -use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader, Lines}; -use tokio::process::{ChildStdin, ChildStdout, Command}; +use tokio::process::Command; pub mod message { use super::*; @@ -43,18 +43,12 @@ pub mod message { #[derive(Message)] #[rtype(result = "()")] - pub(super) struct ProcessOutput { - pub stdout: ChildStdout, - } + pub(super) struct RestartProcessWithOptions(pub A::Options); /// wait for process 
termination #[derive(Message)] #[rtype(result = "()")] - pub(super) struct CheckProcessStatus; - - #[derive(Message)] - #[rtype(result = "()")] - pub(super) struct CollectOutput(pub ActionResult); + pub(super) struct WaitForProcess(pub Option); #[derive(Message)] #[rtype(result = "()")] @@ -66,11 +60,11 @@ pub mod message { #[derive(Message)] #[rtype(result = "()")] - pub(super) struct Rpc(pub RpcRequest); + pub struct Results(pub Vec>); #[derive(Message)] #[rtype(result = "()")] - pub(super) struct SendRpcResponse(pub RpcResponse); + pub struct UpdateResults(pub Vec>); } use message::*; @@ -90,20 +84,19 @@ const DEFAULT_TIMEOUT: u64 = 120; pub enum TaskFinishStatus { Cancelled, Error, - Finished(Vec>), + Finished(IndexMap>), } pub struct TaskActor { config: Arc, + action_generator: A::Generator, action: A, process: Option>, - // TODO separate i/o actor, and write queue instead of mutex - process_stdin: Option>>, + io: Option>>, task_manager: Addr>, console: Addr, - collected_output: Vec>, + results: IndexMap>, // for the report timeout_duration: Duration, - followup_task_options: Option, } impl TaskActor @@ -112,18 +105,20 @@ where { pub fn new( config: Arc, - action: A, + action_generator: A::Generator, + initial_action: A, task_manager: Addr>, console: Addr, ) -> Self { Self { config, process: None, - process_stdin: None, + io: None, task_manager, console, - action, - collected_output: Default::default(), + action_generator, + action: initial_action, + results: Default::default(), // TODO doc? timeout_duration: Duration::from_secs( std::env::var(TIMEOUT_ENV_NAME) @@ -137,7 +132,6 @@ where }) .unwrap_or(DEFAULT_TIMEOUT), ), - followup_task_options: None, } } @@ -148,12 +142,8 @@ where fn get_path_owned(&self) -> Arc { self.action.get_task_ref().path.clone() } -} - -impl Actor for TaskActor { - type Context = Context; - fn started(&mut self, ctx: &mut Self::Context) { + fn start_process(&mut self, ctx: &mut ::Context) { let addr = ctx.address(); let console = self.console.clone(); let action = self.action.clone(); @@ -175,6 +165,14 @@ impl Actor for TaskActor { ctx.spawn(fut.in_current_span().into_actor(self)); } +} + +impl Actor for TaskActor { + type Context = Context; + + fn started(&mut self, ctx: &mut Self::Context) { + self.start_process(ctx); + } fn stopped(&mut self, _ctx: &mut Self::Context) { trace!("task actor stopped: {:?}", self.get_path()); @@ -196,19 +194,22 @@ impl Handler for TaskActor { match spawn_res { Ok(mut child) => { - let stdout = child.stdout().take(); - let Some(stdout) = stdout else { - self.console.error( - self.action - .get_error_message("could not read output from process"), - ); - ctx.address().do_send(Exit(TaskFinishStatus::::Error)); - return; + let io_actor = TaskIoActor::init( + ctx.address(), + self.action.clone(), + &mut child, + self.console.clone(), + ); + + self.io = match io_actor { + Ok(io_actor) => Some(io_actor), + Err(e) => { + self.console.error(e.to_string()); + ctx.address().do_send(Exit(TaskFinishStatus::::Error)); + return; + } }; - ctx.address().do_send(ProcessOutput { stdout }); - - self.process_stdin = Some(Arc::new(Mutex::new(child.stdin().take().unwrap()))); self.process = Some(child); let addr = ctx.address(); @@ -233,41 +234,28 @@ impl Handler for TaskActor { } } -impl Handler for TaskActor { +impl Handler> for TaskActor { type Result = (); fn handle( &mut self, - ProcessOutput { stdout }: ProcessOutput, - ctx: &mut Context, + RestartProcessWithOptions(options): RestartProcessWithOptions, + ctx: &mut Self::Context, ) -> 
Self::Result { - let addr = ctx.address(); - let console = self.console.clone(); - let path = self.get_path_owned(); - - let fut = async move { - let reader = BufReader::new(stdout).lines(); - if let Err(e) = - Self::loop_output_lines(reader, addr.clone(), console.clone(), path.clone()).await - { - console.error(format!( - "failed to read process output on {:?}: {e:#}", - path - )); - addr.do_send(Exit(TaskFinishStatus::::Error)) - } else { - // end of stdout - addr.do_send(CheckProcessStatus); - } - }; - ctx.spawn(fut.in_current_span().into_actor(self)); + let task_ref = self.action.get_task_ref().clone(); + self.action = self.action_generator.generate(task_ref, options); + self.start_process(ctx); } } -impl Handler for TaskActor { +impl Handler> for TaskActor { type Result = (); - fn handle(&mut self, _msg: CheckProcessStatus, ctx: &mut Context) -> Self::Result { + fn handle( + &mut self, + WaitForProcess(followup_options): WaitForProcess, + ctx: &mut Context, + ) -> Self::Result { let Some(process) = self.process.take() else { self.console .error(format!("task process not found for {:?}", self.get_path())); @@ -280,11 +268,16 @@ impl Handler for TaskActor { let action = self.action.clone(); let fut = async move { + // TODO timeout? match Box::into_pin(process.wait_with_output()).await { Ok(output) => { if output.status.success() { - // logging in Exit handler - addr.do_send(Exit(TaskFinishStatus::::Finished(Default::default()))); + if let Some(followup_options) = followup_options { + addr.do_send(RestartProcessWithOptions(followup_options)) + } else { + // logging in Exit handler + addr.do_send(Exit(TaskFinishStatus::::Finished(Default::default()))); + } } else { console.error(action.get_error_message(&format!( "process failed with code {:?}", @@ -312,133 +305,45 @@ impl Handler for TaskActor { } } -#[derive(Clone, Copy)] -enum OutputLevel { - Debug, - Info, - Warning, - Error, -} - -#[derive(Serialize, Deserialize, Debug)] -enum JsonRpcVersion { - #[serde(rename = "2.0")] - V2, -} - -#[derive(Deserialize, Debug)] -struct RpcRequest { - jsonrpc: JsonRpcVersion, - id: u32, - #[serde(flatten)] - call: serde_json::Value, -} - -impl RpcRequest { - fn response(&self, result: serde_json::Value) -> RpcResponse { - RpcResponse { - jsonrpc: JsonRpcVersion::V2, - id: self.id, - result, - } - } -} - -#[derive(Serialize, Debug)] -struct RpcResponse { - jsonrpc: JsonRpcVersion, - id: u32, - result: serde_json::Value, -} - -impl TaskActor { - async fn loop_output_lines( - mut reader: Lines>, - addr: Addr>, - console: Addr, - path: Arc, - ) -> tokio::io::Result<()> { - let mut latest_level = OutputLevel::Info; - - let scope = format!("[{path}]", path = path.display()); - let scope = scope.yellow(); - - while let Some(line) = reader.next_line().await? 
{ - if let Some(debug) = line.strip_prefix("debug: ") { - console.debug(format!("{scope} {debug}")); - latest_level = OutputLevel::Debug; - continue; - } - - if let Some(info) = line.strip_prefix("info: ") { - console.info(format!("{scope} {info}")); - latest_level = OutputLevel::Info; - continue; - } - - if let Some(warn) = line.strip_prefix("warning: ") { - console.warning(format!("{scope} {warn}")); - latest_level = OutputLevel::Warning; - continue; - } - - if let Some(error) = line.strip_prefix("error: ") { - console.error(format!("{scope} {error}")); - latest_level = OutputLevel::Error; - continue; - } - - if let Some(data_json) = line.strip_prefix("success: ") { - let data: A::SuccessData = serde_json::from_str(data_json)?; - addr.do_send(CollectOutput(Ok(data))); - continue; - } - - if let Some(data_json) = line.strip_prefix("failure: ") { - let data: A::FailureData = serde_json::from_str(data_json)?; - addr.do_send(CollectOutput(Err(data))); - continue; - } +impl Handler> for TaskActor { + type Result = (); - if let Some(req) = line.strip_prefix("jsonrpc: ") { - let req: RpcRequest = serde_json::from_str(req)?; - addr.do_send(message::Rpc(req)); - continue; - } + fn handle(&mut self, results: Results, ctx: &mut Context) -> Self::Result { + let self_addr = ctx.address(); + let action = self.action.clone(); + let finalize_ctx = ActionFinalizeContext { + config: self.config.clone(), + task_manager: self.task_manager.clone(), + task: ctx.address(), + console: self.console.clone(), + }; - match latest_level { - OutputLevel::Debug => { - console.debug(format!("{scope}>{line}")); - } - OutputLevel::Info => { - console.info(format!("{scope}>{line}")); - } - OutputLevel::Warning => { - console.warning(format!("{scope}>{line}")); - } - OutputLevel::Error => { - console.error(format!("{scope}>{line}")); + let fut = async move { + let mut followup: Option = None; + for result in &results.0 { + match action.finalize(result, finalize_ctx.clone()).await { + Ok(Some(followup_opt)) => { + let followup = followup.get_or_insert_with(Default::default); + followup_opt.add_to_options(followup); + } + _ => (), } } - } - Ok(()) + self_addr.do_send(message::UpdateResults(results.0)); + self_addr.do_send(WaitForProcess(followup)); + }; + ctx.spawn(fut.in_current_span().into_actor(self)); } } -impl Handler> for TaskActor { +impl Handler> for TaskActor { type Result = (); - fn handle(&mut self, message: CollectOutput, ctx: &mut Context) -> Self::Result { - self.action.finalize( - &message.0, - ActionFinalizeContext { - config: self.config.clone(), - task_manager: self.task_manager.clone(), - task: ctx.address(), - console: self.console.clone(), - }, - ); - self.collected_output.push(message.0); + fn handle(&mut self, UpdateResults(results): UpdateResults, ctx: &mut Context) { + for result in results.into_iter() { + let tg_name = get_typegraph_name::(&result); + self.results.insert(tg_name, result); + } } } @@ -447,7 +352,7 @@ impl Handler> for TaskActor { fn handle(&mut self, mut message: Exit, ctx: &mut Context) -> Self::Result { if let TaskFinishStatus::::Finished(res) = &mut message.0 { - std::mem::swap(res, &mut self.collected_output); + std::mem::swap(res, &mut self.results); } self.task_manager .do_send(task_manager::message::TaskFinished { @@ -469,74 +374,3 @@ impl Handler for TaskActor { } } } - -impl Handler for TaskActor { - type Result = (); - - fn handle(&mut self, Rpc(req): Rpc, ctx: &mut Context) -> Self::Result { - let action = self.action.clone(); - let addr = ctx.address(); - - let rpc_call: 
A::RpcCall = match serde_json::from_value(req.call.clone()) { - Ok(rpc_call) => rpc_call, - Err(err) => { - self.console - .error(format!("invalid jsonrpc request {req:?}: {err:?}")); - addr.do_send(Exit(TaskFinishStatus::::Error)); - return; - } - }; - - let console = self.console.clone(); - - let fut = async move { - let id = req.id; - match action.get_rpc_response(&rpc_call).await { - Ok(response) => { - addr.do_send(message::SendRpcResponse(req.response(response))); - } - Err(err) => { - console.error(format!("failed to handle jsonrpc call {req:?}: {err:?}")); - addr.do_send(Exit(TaskFinishStatus::::Error)); - } - } - }; - - ctx.spawn(fut.in_current_span().into_actor(self)); - } -} - -impl Handler for TaskActor { - type Result = (); - - fn handle( - &mut self, - SendRpcResponse(response): SendRpcResponse, - ctx: &mut Context, - ) -> Self::Result { - { - let response_id = response.id; - match serde_json::to_string(&response) { - Ok(response) => { - let stdin = self.process_stdin.clone().unwrap(); - let fut = async move { - let mut stdin = stdin.lock().await; - stdin - .write_all(response.as_bytes()) - .await - .expect("could not write rpc response to process stdin"); - stdin - .write_all(b"\n") - .await - .expect("could not write newline to process stdin"); - }; - ctx.spawn(fut.in_current_span().into_actor(self)); - } - Err(e) => { - self.console - .error(format!("could not serialize rpc response {e}")); - } - } - }; - } -} diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index 8b307addae..f8b34a0aab 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -30,6 +30,7 @@ pub trait TaskActionGenerator: Clone { fn get_shared_config(&self) -> Arc; } +#[derive(Clone)] pub struct ActionFinalizeContext { pub config: Arc, pub task_manager: Addr>, @@ -39,6 +40,7 @@ pub struct ActionFinalizeContext { pub trait OutputData: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send { fn get_typegraph_name(&self) -> String; + fn is_success(&self) -> bool; } #[derive(Default, Debug, Clone)] @@ -48,6 +50,21 @@ pub enum TaskFilter { Typegraphs(Vec), } +impl TaskFilter { + pub fn add_typegraph(&mut self, name: String) { + match self { + TaskFilter::All => { + *self = TaskFilter::Typegraphs(vec![name]); + } + TaskFilter::Typegraphs(typegraphs) => { + if !typegraphs.contains(&name) { + typegraphs.push(name); + } + } + } + } +} + impl ToString for TaskFilter { fn to_string(&self) -> String { match self { @@ -57,6 +74,10 @@ impl ToString for TaskFilter { } } +pub trait FollowupOption { + fn add_to_options(&self, options: &mut A::Options); +} + pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { type SuccessData: OutputData; type FailureData: OutputData; @@ -72,7 +93,12 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { fn get_start_message(&self) -> String; fn get_error_message(&self, err: &str) -> String; - fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext); + /// returns followup task options + async fn finalize( + &self, + res: &Result, + ctx: ActionFinalizeContext, + ) -> Result>>>; async fn get_rpc_response(&self, call: &Self::RpcCall) -> Result; } diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index c83c8b791a..2c15348667 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -4,8 +4,8 @@ mod migrations; use super::action::{ - ActionFinalizeContext, 
ActionResult, OutputData, SharedActionConfig, TaskAction, - TaskActionGenerator, TaskFilter, + ActionFinalizeContext, ActionResult, FollowupOption, OutputData, SharedActionConfig, + TaskAction, TaskActionGenerator, TaskFilter, }; use super::command::build_task_command; use crate::deploy::actors::console::Console; @@ -27,7 +27,7 @@ pub struct MigrationAction { pub reset: bool, // reset database if necessary } -#[derive(Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct PrismaRuntimeId { pub typegraph: String, pub name: String, @@ -132,12 +132,19 @@ impl OutputData for DeploySuccess { fn get_typegraph_name(&self) -> String { self.typegraph.clone() } + + fn is_success(&self) -> bool { + self.failure.is_none() + } } impl OutputData for DeployError { fn get_typegraph_name(&self) -> String { self.typegraph.clone() } + fn is_success(&self) -> bool { + false + } } #[derive(Debug, Default)] @@ -158,6 +165,17 @@ pub enum RpcCall { GetDeployData { typegraph: String }, } +struct ResetDatabase(PrismaRuntimeId); + +impl FollowupOption for ResetDatabase { + fn add_to_options(&self, options: &mut DeployOptions) { + options.filter.add_typegraph(self.0.typegraph.clone()); + options + .migration_options + .push((self.0.clone(), MigrationActionOverride::ResetDatabase)); + } +} + impl TaskAction for DeployAction { type SuccessData = DeploySuccess; type FailureData = DeployError; @@ -194,7 +212,11 @@ impl TaskAction for DeployAction { ) } - fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext) { + async fn finalize( + &self, + res: &ActionResult, + ctx: ActionFinalizeContext, + ) -> Result>>> { match res { Ok(data) => { let scope = format!("({path})", path = self.task_ref.path.display()); @@ -212,6 +234,7 @@ impl TaskAction for DeployAction { let tg_name = data.get_typegraph_name(); + // TODO async self.unpack_migrations(&tg_name, &data.migrations, &ctx, &scope); match &data.failure { @@ -223,7 +246,26 @@ impl TaskAction for DeployAction { path = self.task_ref.path.display().yellow(), )); - self.handle_push_failure(&tg_name, failure, &ctx, &scope); + let followup_option = self + .handle_push_failure( + &tg_name, + &self.task_ref.path, + failure, + &ctx, + &scope, + ) + .await?; + + Ok(followup_option.map(|opt| match opt.1 { + MigrationActionOverride::ResetDatabase => { + let res: Box> = + Box::new(ResetDatabase(PrismaRuntimeId { + typegraph: tg_name, + name: opt.0, + })); + res + } + })) } None => { ctx.console.info(format!( @@ -232,6 +274,7 @@ impl TaskAction for DeployAction { name = tg_name.cyan(), path = self.task_ref.path.display().yellow(), )); + Ok(None) } } } @@ -244,6 +287,7 @@ impl TaskAction for DeployAction { path = self.task_ref.path.display().yellow(), err = data.error, )); + Ok(None) } } } diff --git a/meta-cli/src/deploy/actors/task/deploy/migrations.rs b/meta-cli/src/deploy/actors/task/deploy/migrations.rs index 3d17f2b645..f9bd12f0c3 100644 --- a/meta-cli/src/deploy/actors/task/deploy/migrations.rs +++ b/meta-cli/src/deploy/actors/task/deploy/migrations.rs @@ -7,7 +7,7 @@ use super::{ DeployAction, DeployActionInner, DeployOptions, Migration, MigrationActionOverride, PrismaRuntimeId, }; -use crate::deploy::actors::console::input::{Confirm, ConfirmHandler, Select}; +use crate::deploy::actors::console::input::{Confirm, Select, SelectOption}; use crate::deploy::actors::console::Console; use crate::deploy::actors::task::action::{ActionFinalizeContext, TaskFilter}; use crate::deploy::actors::task::TaskActor; @@ -52,7 +52,10 @@ enum PushFailure { 
NullConstraintViolation(NullConstraintViolation), } +type RuntimeName = String; + impl DeployActionInner { + // TODO why not async? pub(super) fn unpack_migrations( &self, tg_name: &str, @@ -81,49 +84,121 @@ impl DeployActionInner { } } - pub(super) fn handle_push_failure( + pub(super) async fn handle_push_failure( &self, tg_name: &str, + typegraph_path: &Path, failure_raw: &str, ctx: &ActionFinalizeContext>, scope: &impl std::fmt::Display, - ) { - let failure = serde_json::from_str::(failure_raw); + ) -> Result> { + let failure = serde_json::from_str::(failure_raw) + .context("failed to parse failure data")?; match failure { - Ok(PushFailure::Unknown(error)) => { + PushFailure::Unknown(error) => { ctx.console.error(format!( "{scope} unknown error: {msg}", scope = scope, msg = error.message, )); - } - Ok(PushFailure::DatabaseResetRequired(error)) => { - ctx.task.do_send(message::ConfirmDatabaseReset { - typegraph: tg_name.to_string(), - runtime: error.runtime_name.clone(), - message: error.message.clone(), - }); + Ok(None) } - Ok(PushFailure::NullConstraintViolation(error)) => { - ctx.task.do_send(message::ResolveConstraintViolation { - typegraph: tg_name.to_string(), - runtime: error.runtime_name.clone(), - column: error.column.clone(), - migration: error.migration_name.clone(), - is_new_column: error.is_new_column, - table: error.table.clone(), - message: error.message.clone(), - }); + PushFailure::DatabaseResetRequired(error) => { + ctx.console + .error(format!("{scope} {message}", message = error.message)); + ctx.console.warning(format!( + "{scope} database reset required for prisma runtime {rt} in typegraph {name}", + name = tg_name.cyan(), + rt = error.runtime_name.magenta(), + )); + + let reset = Confirm::new( + ctx.console.clone(), + format!( + "{scope} Do you want to reset the database for prisma runtime {rt} in typegraph {name}?", + scope = scope.yellow(), + name = tg_name.cyan(), + rt = error.runtime_name.magenta(), + ), + ).interact( ).await.context("failed to read user input")?; + + if reset { + Ok(Some(( + error.runtime_name, + MigrationActionOverride::ResetDatabase, + ))) + } else { + Ok(None) + } } - Err(err) => { - ctx.console.error(format!( - "{scope} failed to parse push failure data: {err:?}", - scope = scope, - err = err - )); + PushFailure::NullConstraintViolation(error) => { + ctx.console + .error(format!("{scope} {message}", message = error.message)); + + if error.is_new_column { + ctx.console.info(format!("{scope} manually edit the migration {migration} or remove the migration and set a default value in the typegraph", migration = error.migration_name)); + } + + use options::ConstraintViolationOptions as Options; + let (_, choice) = Select::new( + ctx.console.clone(), + "Choose one of the following options".to_string(), + ) + .interact(&[ + Box::new(options::RemoveLatestMigration), + Box::new(options::ManualResolution { + message: Some(format!( + "Set a default value for the column `{}` in the table `{}`", + error.column, error.table + )), + }), + Box::new(options::ForceReset), + ]) + .await + .context("failed to read user input: {err}")?; + + match choice { + Options::RemoveLatestMigration => { + let migration_path = ctx + .config + .prisma_migration_dir_abs(tg_name) + .join(&error.runtime_name) + .join(&error.migration_name); + tokio::fs::remove_dir_all(&migration_path) + .await + .with_context(|| { + format!("failed to remove migrations at {migration_path:?}") + })?; + ctx.console + .info(format!("Removed migration directory: {migration_path:?}")); + 
ctx.console.info(format!("You can now update your typegraph at {} to create an alternative non-breaking schema.", typegraph_path.to_str().unwrap().bold())); + Ok(Some(( + error.runtime_name, + MigrationActionOverride::ResetDatabase, + ))) + } + Options::ManualResolution => { + let migration_path = ctx + .config + .prisma_migration_dir_abs(tg_name) + .join(&error.runtime_name) + .join(&error.migration_name); + eprintln!("Edit the migration file at {migration_path:?} then press enter to continue..."); + + ctx.console.read_line().await; + Ok(Some(( + error.runtime_name, + MigrationActionOverride::ResetDatabase, + ))) + } + Options::ForceReset => Ok(Some(( + error.runtime_name, + MigrationActionOverride::ResetDatabase, + ))), + } } } } @@ -185,274 +260,21 @@ pub struct ConfirmDatabaseResetRequired { pub runtime_name: String, } -impl ConfirmHandler for ConfirmDatabaseResetRequired { - fn on_confirm(&self) { - self.task.do_send(message::ResetDatabase { - typegraph: self.tg_name.clone(), - runtime: self.runtime_name.clone(), - }) - } -} - -impl Handler for TaskActor { - type Result = (); - - fn handle(&mut self, msg: ConfirmDatabaseReset, ctx: &mut Self::Context) { - let ConfirmDatabaseReset { - typegraph, - runtime, - message, - } = msg; - let scope = format!("({})", self.action.task_ref.path.display()); - - self.console.error(format!("{scope} {message}")); - self.console.warning(format!( - "{scope} database reset required for prisma runtime {rt} in typegraph {name}", - scope = scope.yellow(), - name = typegraph.cyan(), - rt = runtime.magenta(), - )); - - let console = self.console.clone(); - let addr = ctx.address(); - - let fut = async move { - let res = Confirm::new( - console.clone(), - format!( - "{scope} Do you want to reset the database for prisma runtime {rt} in typegraph {name}?", - scope = scope.yellow(), - name = typegraph.cyan(), - rt = runtime.magenta(), - ), - ).interact( - Box::new(ConfirmDatabaseResetRequired { - task: addr, - tg_name: typegraph, - runtime_name: runtime, - }) - - ).await; - - if let Err(err) = res { - console.error(format!("failed to read user input: {err}", err = err)); - } - }; - ctx.spawn(fut.in_current_span().into_actor(self)); - } -} - -impl Handler for TaskActor { - type Result = (); - - fn handle(&mut self, msg: ResetDatabase, _: &mut Self::Context) { - let options = if let Some(options) = self.followup_task_options.as_mut() { - options - } else { - self.followup_task_options = Some(DeployOptions { - filter: TaskFilter::Typegraphs(Default::default()), - migration_options: Default::default(), - }); - self.followup_task_options.as_mut().unwrap() - }; - { - let typegraphs = match options.filter { - TaskFilter::Typegraphs(ref mut typegraphs) => typegraphs, - _ => unreachable!(), - }; - if !typegraphs.contains(&msg.typegraph) { - typegraphs.push(msg.typegraph.clone()); - } - } - options.migration_options.push(( - PrismaRuntimeId { - typegraph: msg.typegraph.clone(), - name: msg.runtime.clone(), - }, - MigrationActionOverride::ResetDatabase, - )); - - // /// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph - // fn do_force_reset(loader: &Addr, tg_path: PathBuf, runtime_name: String) { - // // reset - // let glob_cfg = ServerStore::get_migration_action_glob(); - // ServerStore::set_migration_action( - // tg_path.clone(), - // RuntimeMigrationAction { - // runtime_name, - // action: MigrationAction { - // reset: true, // ! 
- // create: glob_cfg.create, - // }, - // }, - // ); - // - // // reload - // loader.do_send(LoadModule(tg_path.into())); - // } - } -} - -impl Handler for TaskActor { - type Result = (); - - fn handle(&mut self, msg: ResolveConstraintViolation, ctx: &mut Self::Context) { - let ResolveConstraintViolation { - typegraph, - runtime, - column, - migration, - is_new_column, - table, - message, - } = msg; - - let scope = format!("({})", self.action.task_ref.path.display()); - let scope = scope.yellow(); - - self.console.error(format!("{scope} {message}")); - - if is_new_column { - self.console.info(format!("{scope} manually edit the migration {migration} or remove the migration and set a default value")); - - let remove_latest = options::RemoveLatestMigration { - task: ctx.address(), - typegraph: typegraph.clone(), - runtime: runtime.clone(), - migration: migration.clone(), - }; - - let manual = options::ManualResolution { - task: ctx.address(), - typegraph: typegraph.clone(), - runtime: runtime.clone(), - migration: migration.clone(), - message: Some(format!( - "Set a default value for the column `{}` in the table `{}`", - column, table - )), - }; - - let reset = options::ForceReset { - task: ctx.address(), - typegraph: typegraph.clone(), - runtime: runtime.clone(), - }; - - let fut = async move { - let res = Select::new( - self.console.clone(), - "Choose one of the following options".to_string(), - ) - .interact(&[Box::new(remove_latest), Box::new(manual), Box::new(reset)]) - .await; - if let Err(err) = res { - self.console - .error(format!("failed to read user input: {err}", err = err)); - } - }; - } - } -} - -impl Handler for TaskActor { - type Result = (); - - fn handle(&mut self, msg: message::RemoveLatestMigration, ctx: &mut Self::Context) { - let message::RemoveLatestMigration { - typegraph, - runtime, - migration, - } = msg; - - let migration_path = self - .config - .prisma_migration_dir_abs(&typegraph) - .join(&runtime) - .join(&migration); - - // let typegraph = typegraph.clone(); - // let runtime_name = runtime.clone(); - let console = self.console.clone(); - let typegraph_path = self.action.task_ref.path.clone(); - let addr = ctx.address(); - - let fut = async move { - let res = tokio::fs::remove_dir_all(&migration_path).await; - match res { - Ok(_) => { - console.info(format!("Removed migration directory: {:?}", migration_path)); - console.info(format!( - "You can now update your typegraph at {} to create an alternative non-breaking schema.", - typegraph_path.display().to_string().bold() - )); - - addr.do_send(message::ResetDatabase { typegraph, runtime }); - } - Err(err) => { - console.error(format!( - "Failed to remove migration directory: {:?}", - migration_path - )); - console.error(format!("{err}", err = err)); - } - } - }; - - ctx.spawn(fut.in_current_span().into_actor(self)); - } -} - -impl Handler for TaskActor { - type Result = (); - - fn handle(&mut self, msg: message::WaitForManualResolution, ctx: &mut Self::Context) { - let migration_path = self - .config - .prisma_migration_dir_abs(&msg.typegraph) - .join(&msg.runtime) - .join(msg.migration) - .join("migration.sql"); - eprintln!( - "Edit the migration file at {:?} then press enter to continue...", - migration_path - ); - - let console = self.console.clone(); - let addr = ctx.address(); - - let fut = async move { - console.read_line().await; - addr.do_send(message::ResetDatabase { - typegraph: msg.typegraph, - runtime: msg.runtime, - }); - }; - ctx.spawn(fut.in_current_span().into_actor(self)); - } -} - mod options { 
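+    // The options below are presented through `Select::interact` when a
+    // migration fails with a null constraint violation. Instead of sending
+    // actor messages, each option now simply maps to a
+    // `ConstraintViolationOptions` value that `handle_push_failure` turns
+    // into a followup migration override.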
use crate::deploy::actors::console::input::{OptionLabel, SelectOption}; - use crate::deploy::actors::task::deploy::DeployAction; - use crate::deploy::actors::task::TaskActor; - use crate::interlude::*; - #[derive(Debug)] - pub struct RemoveLatestMigration { - pub task: Addr>, - pub typegraph: String, - pub runtime: String, - pub migration: String, // is this necessary?? + pub enum ConstraintViolationOptions { + RemoveLatestMigration, + ManualResolution, + ForceReset, } - impl SelectOption for RemoveLatestMigration { - fn on_select(&self) { - self.task.do_send(super::message::RemoveLatestMigration { - typegraph: self.typegraph.clone(), - runtime: self.runtime.clone(), - migration: self.migration.clone(), - }); + #[derive(Debug)] + pub struct RemoveLatestMigration; + + impl SelectOption for RemoveLatestMigration { + fn get_value(&self) -> ConstraintViolationOptions { + ConstraintViolationOptions::RemoveLatestMigration } fn label(&self) -> OptionLabel<'_> { @@ -462,20 +284,12 @@ mod options { #[derive(Debug)] pub struct ManualResolution { - pub task: Addr>, - pub typegraph: String, - pub runtime: String, - pub migration: String, pub message: Option, } - impl SelectOption for ManualResolution { - fn on_select(&self) { - self.task.do_send(super::message::WaitForManualResolution { - typegraph: self.typegraph.clone(), - runtime: self.runtime.clone(), - migration: self.migration.clone(), - }); + impl SelectOption for ManualResolution { + fn get_value(&self) -> ConstraintViolationOptions { + ConstraintViolationOptions::ManualResolution } fn label(&self) -> OptionLabel<'_> { @@ -489,18 +303,11 @@ mod options { } #[derive(Debug)] - pub struct ForceReset { - pub task: Addr>, - pub typegraph: String, - pub runtime: String, - } + pub struct ForceReset; - impl SelectOption for ForceReset { - fn on_select(&self) { - self.task.do_send(super::message::ResetDatabase { - typegraph: self.typegraph.clone(), - runtime: self.runtime.clone(), - }); + impl SelectOption for ForceReset { + fn get_value(&self) -> ConstraintViolationOptions { + ConstraintViolationOptions::ForceReset } fn label(&self) -> OptionLabel<'_> { diff --git a/meta-cli/src/deploy/actors/task/serialize.rs b/meta-cli/src/deploy/actors/task/serialize.rs index bcaeda5876..fed674a29a 100644 --- a/meta-cli/src/deploy/actors/task/serialize.rs +++ b/meta-cli/src/deploy/actors/task/serialize.rs @@ -2,8 +2,8 @@ // SPDX-License-Identifier: MPL-2.0 use super::action::{ - ActionFinalizeContext, ActionResult, OutputData, SharedActionConfig, TaskAction, - TaskActionGenerator, TaskFilter, + ActionFinalizeContext, ActionResult, FollowupOption, OutputData, SharedActionConfig, + TaskAction, TaskActionGenerator, TaskFilter, }; use super::command::build_task_command; use super::deploy::MigrationAction; @@ -76,6 +76,9 @@ impl OutputData for Box { fn get_typegraph_name(&self) -> String { self.name().unwrap() } + fn is_success(&self) -> bool { + true + } } #[derive(Debug, Default)] @@ -87,6 +90,9 @@ impl OutputData for SerializeError { fn get_typegraph_name(&self) -> String { self.typegraph.clone() } + fn is_success(&self) -> bool { + false + } } impl TaskAction for SerializeAction { @@ -125,7 +131,11 @@ impl TaskAction for SerializeAction { ) } - fn finalize(&self, res: &ActionResult, ctx: ActionFinalizeContext) { + async fn finalize( + &self, + res: &ActionResult, + ctx: ActionFinalizeContext, + ) -> Result>>> { match res { Ok(data) => { ctx.console.info(format!( @@ -145,6 +155,8 @@ impl TaskAction for SerializeAction { )); } } + + Ok(None) } fn get_task_ref(&self) 
-> &crate::deploy::actors::task_manager::TaskRef { diff --git a/meta-cli/src/deploy/actors/task_io.rs b/meta-cli/src/deploy/actors/task_io.rs new file mode 100644 index 0000000000..2d8e2021f9 --- /dev/null +++ b/meta-cli/src/deploy/actors/task_io.rs @@ -0,0 +1,323 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use super::{ + console::Console, + task::{ + action::{ActionResult, TaskAction}, + TaskActor, + }, +}; +use crate::deploy::actors::console::ConsoleActor; +use crate::interlude::*; +use colored::OwoColorize; +use futures::lock::Mutex; +use process_wrap::tokio::TokioChildWrapper; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; +use tokio::process::ChildStdin; + +mod message { + use super::*; + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct OutputLine(pub String); + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct SendRpcResponse(pub RpcResponse); + + #[derive(Message)] + #[rtype(result = "()")] + pub(super) struct Exit; +} + +#[derive(Clone, Copy)] +enum OutputLevel { + Debug, + Info, + Warning, + Error, +} + +#[derive(Serialize, Deserialize, Debug)] +enum JsonRpcVersion { + #[serde(rename = "2.0")] + V2, +} + +#[derive(Deserialize, Debug)] +struct RpcRequest { + jsonrpc: JsonRpcVersion, + id: u32, + #[serde(flatten)] + call: serde_json::Value, +} + +impl RpcRequest { + fn response(&self, result: serde_json::Value) -> RpcResponse { + RpcResponse { + jsonrpc: JsonRpcVersion::V2, + id: self.id, + result, + } + } +} + +#[derive(Serialize, Debug)] +struct RpcResponse { + jsonrpc: JsonRpcVersion, + id: u32, + result: serde_json::Value, +} + +pub(super) struct TaskIoActor { + stdin: Arc>, + action: A, + task: Addr>, + console: Addr, + latest_level: OutputLevel, + results: Vec>, +} + +impl TaskIoActor { + pub fn init( + task: Addr>, + action: A, + process: &mut Box, + console: Addr, + ) -> Result> { + let stdin = process + .stdin() + .take() + .ok_or_else(|| ferr!("could not take stdin handle from the process"))?; + let stdout = process + .stdout() + .take() + .ok_or_else(|| ferr!("could not take stdout handle from the process"))?; + + let addr = Self::create(move |ctx| { + let actor = Self { + stdin: Arc::new(Mutex::new(stdin)), + action, + task, + console: console.clone(), + latest_level: OutputLevel::Info, + results: vec![], + }; + + let self_addr = ctx.address().downgrade(); + let scope = actor.get_console_scope(); + let fut = async move { + let mut reader = BufReader::new(stdout).lines(); + loop { + match reader.next_line().await { + Ok(Some(line)) => { + let self_addr = self_addr.upgrade().expect( + "unreachable: future should have been cancelled when self dropped", + ); + self_addr.do_send(message::OutputLine(line)) + } + Ok(None) => { + break; + } + Err(err) => { + console.error(format!("{scope} failed to read from stdout: {err}")); + break; + } + } + } + console.debug("task i/o actor finished reading from stdout".to_string()); + let self_addr = self_addr + .upgrade() + .expect("future should have been cancelled when self dropped"); + self_addr.do_send(message::Exit); + }; + + ctx.spawn(fut.in_current_span().into_actor(&actor)); + + actor + }); + + Ok(addr) + } +} + +impl Actor for TaskIoActor { + type Context = Context; + + fn started(&mut self, _ctx: &mut Context) { + trace!("task i/o actor started"); + } + + fn stopped(&mut self, _ctx: &mut Self::Context) { + trace!("task i/o actor stopped"); + } +} + +impl Handler for TaskIoActor { + type Result = (); + + 
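+    // Each line read from the child process stdout is dispatched on its
+    // prefix: `debug: `, `info: `, `warning: ` and `error: ` are logged at the
+    // corresponding level; `success: ` and `failure: ` carry JSON-encoded
+    // action results; `jsonrpc: ` carries a JSON-RPC request from the SDK.
+    // Unprefixed lines are treated as continuations of the previous level
+    // (see `handle_continuation` below). Example lines (illustrative, not
+    // verbatim SDK output):
+    //   info: serializing typegraph example
+    //   success: {"typegraph": "example", ...}
+    //   jsonrpc: {"jsonrpc": "2.0", "id": 0, "method": "GetDeployTarget", "params": null}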
fn handle(&mut self, message::OutputLine(line): message::OutputLine, ctx: &mut Context) { + let console = &self.console; + let scope = self.get_console_scope(); + + match line.split_once(": ") { + Some((prefix, tail)) => { + trace!("prefix: {prefix}"); + match prefix { + "debug" => { + console.debug(format!("{scope} {tail}")); + self.latest_level = OutputLevel::Debug; + } + "info" => { + console.info(format!("{scope} {tail}")); + self.latest_level = OutputLevel::Info; + } + "warning" => { + console.warning(format!("{scope} {tail}")); + self.latest_level = OutputLevel::Warning; + } + "error" => { + console.error(format!("{scope} {tail}")); + self.latest_level = OutputLevel::Error; + } + "success" => { + match serde_json::from_str(tail) { + Ok(data) => self.results.push(Ok(data)), + Err(err) => { + console.error(format!("{scope} failed to process message: {err}")); + // TODO fail task? + } + } + } + "failure" => { + match serde_json::from_str(tail) { + Ok(data) => { + self.results.push(Err(data)); + } + Err(err) => { + console.error(format!("{scope} failed to process message: {err}")); + // TODO fail task? + } + } + } + "jsonrpc" => { + match serde_json::from_str(tail) { + Ok(req) => self.handle_rpc_request(req, ctx.address(), ctx), + Err(err) => { + console.error(format!("{scope} failed to process message: {err}")); + // TODO fail task? + } + } + } + + _ => self.handle_continuation(&line), + } + } + None => { + self.handle_continuation(&line); + } + } + } +} + +impl TaskIoActor { + fn get_console_scope(&self) -> String { + let path = self.action.get_task_ref().path.to_str().unwrap(); + format!("[{path}]", path = path.yellow()) + } + + // process as continuation to previous output + fn handle_continuation(&self, line: &str) { + let console = &self.console; + let scope = self.get_console_scope(); + + match self.latest_level { + OutputLevel::Debug => { + console.debug(format!("{scope}>{line}")); + } + OutputLevel::Info => { + console.info(format!("{scope}>{line}")); + } + OutputLevel::Warning => { + console.warning(format!("{scope}>{line}")); + } + OutputLevel::Error => { + console.error(format!("{scope}>{line}")); + } + } + } + + fn handle_rpc_request(&self, req: RpcRequest, self_addr: Addr, ctx: &mut Context) { + match serde_json::from_value::(req.call.clone()) { + Ok(rpc_call) => { + let console = self.console.clone(); + let action = self.action.clone(); + let scope = self.get_console_scope(); + + let fut = async move { + let id = req.id; + match action.get_rpc_response(&rpc_call).await { + Ok(response) => { + self_addr.do_send(message::SendRpcResponse(req.response(response))); + } + Err(err) => { + console.error(format!( + "{scope} failed to handle jsonrpc call {req:?}: {err}" + )); + // TODO fail task? 
+ } + } + }; + ctx.spawn(fut.in_current_span().into_actor(self)); + } + Err(err) => { + self.console.error(format!( + "{scope} invalid jsonrpc request {req:?}: {err}", + scope = format!("[{path}]", path = self.get_console_scope()) + )); + } + } + } +} + +impl Handler for TaskIoActor { + type Result = (); + + fn handle( + &mut self, + message::SendRpcResponse(response): message::SendRpcResponse, + ctx: &mut Context, + ) { + match serde_json::to_string(&response) { + Ok(mut response) => { + let stdin = self.stdin.clone(); + response.push('\n'); + let fut = async move { + let mut stdin = stdin.lock().await; + stdin + .write_all(response.as_bytes()) + .await + .expect("could not write rpc response to stdin"); + }; + ctx.spawn(fut.in_current_span().into_actor(self)); + } + Err(e) => { + self.console + .error(format!("could not serialize rpc response {e}")); + } + } + } +} + +impl Handler for TaskIoActor { + type Result = (); + + fn handle(&mut self, _message: message::Exit, ctx: &mut Context) { + self.task + .do_send(super::task::message::Results(std::mem::take( + &mut self.results, + ))); + ctx.stop(); + } +} diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index aa6cba18e2..b3c298c4a6 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -377,6 +377,7 @@ impl Handler for TaskManager { ); let task_addr = TaskActor::new( self.init_params.config.clone(), + self.init_params.action_generator.clone(), action, ctx.address(), self.console.clone(), @@ -404,7 +405,7 @@ impl Handler> for TaskManager { } TaskFinishStatus::Finished(results) => { // TODO partial retry - if multiple typegraphs in a single file - if results.iter().any(|r| matches!(r, Err(_))) { + if results.iter().any(|r| matches!(r.1, Err(_))) { next_retry_no = Some(message.task_ref.retry_no + 1); } } diff --git a/meta-cli/src/deploy/actors/task_manager/report.rs b/meta-cli/src/deploy/actors/task_manager/report.rs index bbc5e79826..b3ba0ac1a7 100644 --- a/meta-cli/src/deploy/actors/task_manager/report.rs +++ b/meta-cli/src/deploy/actors/task_manager/report.rs @@ -2,7 +2,10 @@ // SPDX-License-Identifier: MPL-2.0 use super::StopReason; -use crate::deploy::actors::task::{action::TaskAction, TaskFinishStatus}; +use crate::deploy::actors::task::{ + action::{OutputData, TaskAction}, + TaskFinishStatus, +}; use color_eyre::owo_colors::OwoColorize; use std::{path::Path, sync::Arc}; @@ -34,7 +37,12 @@ impl Report { |mut summary, entry| { let (text, success) = match &entry.status { TaskFinishStatus::::Finished(results) => { - let success_count = results.iter().filter(|res| res.is_ok()).count(); + let success_count = results + .iter() + .filter(|(_, res)| { + res.as_ref().ok().map(|r| r.is_success()).unwrap_or(false) + }) + .count(); ( format!("{}/{} success", success_count, results.len()), success_count == results.len(), diff --git a/typegraph/node/sdk/src/envs/cli.ts b/typegraph/node/sdk/src/envs/cli.ts new file mode 100644 index 0000000000..e19fedc346 --- /dev/null +++ b/typegraph/node/sdk/src/envs/cli.ts @@ -0,0 +1,100 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +const requiredCliEnvs = [ + "version", + "command", + "typegraph_path", + "filter", + "config_dir", + "working_dir", + "migrations_dir", +] as const; + +const optionalCliEnvs = ["prefix"] as const; + +const COMMANDS = ["serialize", "deploy"] as const; +type Command = typeof COMMANDS extends ReadonlyArray ? 
C : never; + +export interface CliEnv { + version: string; + command: Command; + typegraph_path: string; + filter: string[] | null; + config_dir: string; + working_dir: string; + migrations_dir: string; + prefix?: string; +} + +export function loadCliEnv(): CliEnv | null { + const record: Partial = {}; + const env = process.env; + const missing: string[] = []; + + for (const key of requiredCliEnvs) { + const name = `MCLI_${key.toLocaleUpperCase()}`; + const envValue = env[name]; + if (envValue == null) { + missing.push(name); + } else { + switch (key) { + case "command": + if (!COMMANDS.includes(envValue as any)) { + throw new Error( + `${name} env value should be one of: serialize, deploy`, + ); + } + record[key] = envValue as Command; + break; + + case "filter": + if (envValue === "all") { + record[key] = null; + } else { + const prefix = "typegraph="; + if (!envValue.startsWith(prefix)) { + throw new Error(`invalid ${name} env value: ${envValue}`); + } else { + record[key] = envValue.slice(prefix.length).split(","); + } + } + break; + + default: + record[key] = envValue; + break; + } + } + } + + for (const key of optionalCliEnvs) { + const name = `MCLI_${key.toLocaleUpperCase()}`; + const envValue = env[name]; + if (envValue != null) { + record[key] = envValue; + } + } + + if (missing.length > 0) { + if (Object.keys(record).length === 0) { + return null; + } + throw new Error(`required environment variables: ${missing.join(", ")}`); + } + + return record as CliEnv; +} + +export const CLI_ENV = loadCliEnv(); + +export function hasCliEnv() { + return CLI_ENV != null; +} + +export function getCliEnv() { + if (CLI_ENV == null) { + throw new Error("cannot be called in this context"); + } + return CLI_ENV; +} diff --git a/typegraph/node/sdk/src/io.ts b/typegraph/node/sdk/src/io.ts index 5e5aea0855..ef364f5363 100644 --- a/typegraph/node/sdk/src/io.ts +++ b/typegraph/node/sdk/src/io.ts @@ -1,7 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 import { inspect } from "node:util"; -import { createInterface, Interface } from "node:readline"; +// import { createInterface, Interface } from "node:readline"; /** * see: module level documentation `meta-cli/src/deploy/actors/task.rs` @@ -89,14 +89,16 @@ class RpcResponseReader { } } -let rpcCall = (() => { +const JSONRPC_VERSION = "2.0"; + +const rpcCall = (() => { const responseReader = new RpcResponseReader(); let latestRpcId = 0; return (method: string, params: any = null) => { const rpcId = latestRpcId++; const rpcMessage = JSON.stringify({ - jsonrpc: "2.0", + jsonrpc: JSONRPC_VERSION, id: rpcId, method, params, @@ -107,37 +109,28 @@ let rpcCall = (() => { }; })(); -export interface TypegateConfig { - endpoint: string; +export interface DeployTarget { + base_url: string; auth: { username: string; password: string; }; } -export interface GlobalConfig { - typegate: TypegateConfig | null; // null for serialize - prefix: string | null; - // TODO codegen - // TODO base migration directory -} - export interface MigrationAction { apply: boolean; create: boolean; reset: boolean; } -export interface TypegraphConfig { +export interface DeployData { secrets: Record; - artifactResolution: boolean; - migrationActions: Record; defaultMigrationAction: MigrationAction; - migrationsDir: string; + migrationActions: Record; } export const rpc = { - getGlobalConfig: () => rpcCall("queryGlobalConfig") as Promise, - getTypegraphConfig: (typegraph: string) => - rpcCall("queryTypegraphConfig", { typegraph }) as Promise, + getDeployTarget: () => rpcCall("GetDeployTarget") as Promise, + getDeployData: (typegraph: string) => + rpcCall("GetDeployData", { typegraph }) as Promise, }; diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index c8b5f75f80..ce12f9fa20 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -4,85 +4,53 @@ import { FinalizeParams } from "./gen/interfaces/metatype-typegraph-core.js"; import { BasicAuth, tgDeploy } from "./tg_deploy.js"; import { TgFinalizationResult, TypegraphOutput } from "./typegraph.js"; -import { getEnvVariable } from "./utils/func_utils.js"; import { freezeTgOutput } from "./utils/func_utils.js"; -import { GlobalConfig, log, rpc, TypegraphConfig } from "./io.js"; - -const PORT = "MCLI_SERVER_PORT"; // meta-cli instance that executes the current file -const SELF_PATH = "MCLI_TG_PATH"; // path to the current file to uniquely identify the run results - -type Command = "serialize" | "deploy" | "codegen"; +import { log, rpc } from "./io.js"; +import { CliEnv, getCliEnv } from "./envs/cli.js"; export class Manager { #typegraph: TypegraphOutput; - #typegraphPath: string; - - static #globalConfig: GlobalConfig | null = null; - static async getGlobalConfig(): Promise { - if (Manager.#globalConfig == null) { - Manager.#globalConfig = await rpc.getGlobalConfig(); - } - return Manager.#globalConfig; - } - - static #command: Command | null = null; - static getCommand(): Command { - if (Manager.#command == null) { - Manager.#command = getEnvVariable("MCLI_ACTION") as Command; - } - return Manager.#command; - } - - static isRunFromCLI(): boolean { - return !!getEnvVariable(PORT); - } + #env: CliEnv; - public static async init(typegraph: TypegraphOutput) { - const globalConfig = await Manager.getGlobalConfig(); - const typegraphConfig = await rpc.getTypegraphConfig(typegraph.name); - return new Manager(typegraph, globalConfig, typegraphConfig); - } - - private constructor( 
- typegraph: TypegraphOutput, - private globalConfig: GlobalConfig, - private typegraphConfig: TypegraphConfig, - ) { + constructor(typegraph: TypegraphOutput) { this.#typegraph = typegraph; - this.#typegraphPath = getEnvVariable(SELF_PATH)!; + this.#env = getCliEnv(); } async run() { - const command = Manager.getCommand(); - - const finalizeParams = { - typegraphPath: this.#typegraphPath, - prefix: this.globalConfig.prefix ?? undefined, - artifactResolution: true, - codegen: false, - prismaMigration: { - migrationsDir: this.typegraphConfig.migrationsDir, - migrationActions: Object.entries(this.typegraphConfig.migrationActions), - defaultMigrationAction: this.typegraphConfig.defaultMigrationAction, - }, - } as FinalizeParams; - - switch (command) { + switch (this.#env.command) { case "serialize": - await this.#serialize(finalizeParams); + await this.#serialize(); break; case "deploy": - await this.#deploy(finalizeParams); + await this.#deploy(); break; default: - throw new Error(`command ${command} from meta-cli not supported`); + throw new Error( + `command ${this.#env.command} from meta-cli not supported`, + ); } } - async #serialize(config: FinalizeParams): Promise { + async #serialize(): Promise { let finalizationResult: TgFinalizationResult; try { - finalizationResult = this.#typegraph.serialize(config); + const env = this.#env; + finalizationResult = this.#typegraph.serialize({ + typegraphPath: env.typegraph_path, + prefix: env.prefix, + artifactResolution: true, + codegen: false, + prismaMigration: { + migrationsDir: env.migrations_dir, + migrationActions: [], + defaultMigrationAction: { + apply: true, + create: false, + reset: false, + }, + }, + }); log.success(finalizationResult.tgJson, true); } catch (err: any) { log.failure({ @@ -92,21 +60,29 @@ export class Manager { } } - async #deploy(finalizeParams: FinalizeParams): Promise { - const { endpoint, auth } = this.globalConfig.typegate!; - if (!auth) { - throw new Error( - `"${this.#typegraph.name}" received null or undefined "auth" field on the configuration`, - ); - } + async #deploy(): Promise { + const deployData = await rpc.getDeployData(this.#typegraph.name); + + const env = this.#env; + const params: FinalizeParams = { + typegraphPath: env.typegraph_path, + prefix: env.prefix, + artifactResolution: true, + codegen: false, + prismaMigration: { + migrationsDir: env.migrations_dir, + migrationActions: Object.entries(deployData.migrationActions), + defaultMigrationAction: deployData.defaultMigrationAction, + }, + }; // hack for allowing tg.serialize(config) to be called more than once - const frozenOut = freezeTgOutput(finalizeParams, this.#typegraph); + const frozenOut = freezeTgOutput(params, this.#typegraph); // hack for allowing tg.serialize(config) to be called more than once let frozenSerialized: TgFinalizationResult; try { - frozenSerialized = frozenOut.serialize(finalizeParams); + frozenSerialized = frozenOut.serialize(params); } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, @@ -119,23 +95,27 @@ export class Manager { serialize: () => frozenSerialized, } as TypegraphOutput; - if (finalizeParams.codegen) { + if (params.codegen) { // TODO throw new Error("not implemented"); } try { + const deployTarget = await rpc.getDeployTarget(); const { response } = await tgDeploy(reusableTgOutput, { typegate: { - url: endpoint, - auth: new BasicAuth(auth.username, auth.password), + url: deployTarget.base_url, + auth: new BasicAuth( + deployTarget.auth.username, + deployTarget.auth.password, + ), }, - 
typegraphPath: this.#typegraphPath, - prefix: finalizeParams.prefix, - secrets: this.typegraphConfig.secrets, - migrationsDir: this.typegraphConfig.migrationsDir, - migrationActions: this.typegraphConfig.migrationActions, - defaultMigrationAction: this.typegraphConfig.defaultMigrationAction, + typegraphPath: env.typegraph_path, + prefix: env.prefix, + secrets: deployData.secrets, + migrationsDir: env.migrations_dir, + migrationActions: deployData.migrationActions, + defaultMigrationAction: deployData.defaultMigrationAction, }); log.success({ typegraph: this.#typegraph.name, ...response }); @@ -147,43 +127,4 @@ export class Manager { return; } } - - // async #relayResultToCLI(initiator: Command, data: T) { - // const typegraphName = this.#typegraph.name; - // const response: SDKResponse = { - // command: initiator, - // typegraphName, - // typegraphPath: this.#typegraphPath, - // data, - // }; - // await fetch(new URL("response", this.#endpoint), { - // method: "POST", - // headers: { "Content-Type": "application/json" }, - // body: JSON.stringify(response), - // }); - // } - - // async #relayErrorToCLI( - // initiator: Command, - // code: string, - // msg: string, - // value: string | any, - // ) { - // const typegraphName = this.#typegraph.name; - // const response: SDKResponse = { - // command: initiator, - // typegraphName, - // typegraphPath: this.#typegraphPath, - // error: { - // code, - // msg, - // value, - // }, - // }; - // await fetch(new URL("response", this.#endpoint), { - // method: "POST", - // headers: { "Content-Type": "application/json" }, - // body: JSON.stringify(response), - // }); - // } } diff --git a/typegraph/node/sdk/src/typegraph.ts b/typegraph/node/sdk/src/typegraph.ts index 9e90b56d7b..c034f23acb 100644 --- a/typegraph/node/sdk/src/typegraph.ts +++ b/typegraph/node/sdk/src/typegraph.ts @@ -18,6 +18,7 @@ import { } from "./gen/interfaces/metatype-typegraph-core.js"; import { Manager } from "./tg_manage.js"; import { log } from "./io.js"; +import { hasCliEnv } from "./envs/cli.js"; type Exports = Record; @@ -239,8 +240,8 @@ export async function typegraph( name, } as TypegraphOutput; - if (Manager.isRunFromCLI()) { - const manager = await Manager.init(ret); + if (hasCliEnv()) { + const manager = new Manager(ret); await manager.run(); // TODO solve hanging process (stdin??) 
@@ -251,6 +252,7 @@ export async function typegraph( } }, 10); } + --counter; return ret; diff --git a/typegraph/python/typegraph/envs/cli.py b/typegraph/python/typegraph/envs/cli.py index a09a126128..03f7254e94 100644 --- a/typegraph/python/typegraph/envs/cli.py +++ b/typegraph/python/typegraph/envs/cli.py @@ -6,8 +6,6 @@ from os import environ from enum import Enum -from typegraph.io import Log - _required_cli_envs = ( "version", "command", @@ -75,8 +73,6 @@ def load(cls) -> Optional["CliEnv"]: filter = raw_filter.removeprefix("typegraph=").split(",") d["filter"] = filter - Log.debug(d) - return cls(**d) @@ -85,5 +81,5 @@ def load(cls) -> Optional["CliEnv"]: def get_cli_env(): if CLI_ENV is None: - raise Exception("cannot be called on this context") + raise Exception("cannot be called in this context") return CLI_ENV diff --git a/typegraph/python/typegraph/io.py b/typegraph/python/typegraph/io.py index 4f7dc75f8e..92bb66f384 100644 --- a/typegraph/python/typegraph/io.py +++ b/typegraph/python/typegraph/io.py @@ -9,7 +9,7 @@ import json -_JSON_RPC_VERSION = "2.0" +_JSONRPC_VERSION = "2.0" class Log: @@ -60,7 +60,7 @@ def read(self, rpc_id: int): Log.error("rpc response: failed to parse input as json") continue - if parsed.get("jsonrpc") != _JSON_RPC_VERSION: + if parsed.get("jsonrpc") != _JSONRPC_VERSION: Log.error("rpc response: invalid jsonrpc version") continue @@ -83,7 +83,7 @@ def call(cls, method: str, params: Any): rpc_id = cls.latest_rpc_id rpc_message = json.dumps( { - "jsonrpc": _JSON_RPC_VERSION, + "jsonrpc": _JSONRPC_VERSION, "id": rpc_id, "method": method, "params": params, @@ -115,15 +115,6 @@ def migration_action_from_dict(raw: Dict[str, bool]) -> MigrationAction: ) -@dataclass -class TypegraphConfig: - secrets: Dict[str, str] - artifact_resolution: bool - migration_actions: Dict[str, MigrationAction] - default_migration_action: MigrationAction - migrations_dir: str - - class Rpc: _deploy_target: Optional[DeployTarget] = None From f98adbfe284b971f0c7d4e1279f0f7379f49f1f7 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 14:54:20 +0300 Subject: [PATCH 15/35] fix migrations --- meta-cli/src/deploy/actors/task_manager.rs | 1 - .../runtimes/prisma/hooks/run_migrations.ts | 27 +- typegate/tests/e2e/cli/deploy_test.ts | 580 +++++++++--------- typegate/tests/e2e/cli/dev_test.ts | 69 ++- typegraph/node/sdk/src/envs/cli.ts | 2 +- typegraph/python/typegraph/envs/cli.py | 7 +- typegraph/python/typegraph/graph/tg_manage.py | 10 +- 7 files changed, 362 insertions(+), 334 deletions(-) diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index b3c298c4a6..f4b8d292e6 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -312,7 +312,6 @@ impl Actor for TaskManager { .collect(), }; - debug!("sending report: {:?}", report); self.report_tx.take().unwrap().send(report).unwrap_or_log(); } } diff --git a/typegate/src/runtimes/prisma/hooks/run_migrations.ts b/typegate/src/runtimes/prisma/hooks/run_migrations.ts index 1b35001f2f..7674a6dc0e 100644 --- a/typegate/src/runtimes/prisma/hooks/run_migrations.ts +++ b/typegate/src/runtimes/prisma/hooks/run_migrations.ts @@ -16,7 +16,10 @@ import type { ParsedDiff } from "../../../../engine/runtime.js"; export class MigrationFailure extends Error { errors: PushFailure[] = []; - private constructor(message: string, public runtimeName?: string) { + private constructor( + message: string, + public runtimeName?: string, + ) { super(message); } @@ 
-45,7 +48,8 @@ export class MigrationFailure extends Error { const prefix = "ERROR: "; const prefixLen = prefix.length; - err.errors = message.split("\n") + err.errors = message + .split("\n") .filter((line) => line.startsWith(prefix)) .map((line) => line.slice(prefixLen)) .map((err) => { @@ -53,9 +57,9 @@ export class MigrationFailure extends Error { if (match != null) { const { table, col } = match.groups!; - const isNewColumn = diff?.find((d) => - d.table === table - )?.diff.find((d) => d.column === col)?.diff.action !== "Altered"; + const isNewColumn = diff + ?.find((d) => d.table === table) + ?.diff.find((d) => d.column === col)?.diff.action !== "Altered"; return { reason: "NullConstraintViolation", message: [ @@ -91,8 +95,8 @@ export const runMigrations: PushHandler = async ( response, ) => { // TODO simpler: Use only one type for prisma runtime data, with some optional fields that would be set by hooks - const runtimes = typegraph.runtimes.filter((rt) => - rt.name === "prisma" + const runtimes = typegraph.runtimes.filter( + (rt) => rt.name === "prisma", ) as PrismaRT.DS[]; for (const rt of runtimes) { @@ -100,13 +104,14 @@ export const runMigrations: PushHandler = async ( response.warn(`Migrations disabled for runtime ${rt.data.name}`); continue; } + console.debug("migration options", rt.data.migration_options); const migration = new Migration(rt.data, secretManager, response); try { await migration.run(); } catch (err) { - const error = (err instanceof MigrationFailure) + const error = err instanceof MigrationFailure ? err : MigrationFailure.fromErrorMessage(err.message, rt.data.name); response.setFailure(error.errors[0]); @@ -144,7 +149,8 @@ class Migration { async run() { const migrations = this.#options.migration_files; - if (this.#options.create) { // like `prisma dev` + if (this.#options.create) { + // like `prisma dev` // apply pending migrations if (migrations != null) { await this.#opApply(migrations); @@ -155,7 +161,8 @@ class Migration { // create new migration await this.#opCreate(diff); } - } else { // like `prisma deploy` + } else { + // like `prisma deploy` if (migrations == null) { this.#warn( [ diff --git a/typegate/tests/e2e/cli/deploy_test.ts b/typegate/tests/e2e/cli/deploy_test.ts index dfeb29af6b..56c156d854 100644 --- a/typegate/tests/e2e/cli/deploy_test.ts +++ b/typegate/tests/e2e/cli/deploy_test.ts @@ -37,13 +37,15 @@ interface DeployOptions { secrets?: Record; } -async function deploy( - { port, noMigration = false, secrets = {} }: DeployOptions, -) { +async function deploy({ + port, + noMigration = false, + secrets = {}, +}: DeployOptions) { const migrationOpts = noMigration ? [] : ["--create-migration"]; - const secretOpts = Object.entries(secrets).flatMap(( - [key, value], - ) => `--secret=${key}=${value}`); + const secretOpts = Object.entries(secrets).flatMap( + ([key, value]) => `--secret=${key}=${value}`, + ); try { const out = await m.cli( @@ -75,293 +77,305 @@ async function deploy( } } -Meta.test({ - name: "meta deploy: fails migration for new columns without default value", -}, async (t) => { - const schema = randomSchema(); - const secrets = { - POSTGRES: - `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, - }; - await t.should("load first version of the typegraph", async () => { - await reset(tgName, schema); - await writeTypegraph(null); - }); - - const port = t.port!; - - // `deploy` must be run outside of the `should` block, - // otherwise this would fail by leaking ops. 
- // That is expected since it creates new engine that persists beyond the - // `should` block. - await deploy({ port, secrets }); - - await t.should("insert records", async () => { - const e = t.getTypegraphEngine(tgName); - if (!e) { - throw new Error("typegraph not found"); - } +Meta.test( + { + name: "meta deploy: fails migration for new columns without default value", + }, + async (t) => { + const schema = randomSchema(); + const secrets = { + POSTGRES: + `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + }; + await t.should("load first version of the typegraph", async () => { + await reset(tgName, schema); + await writeTypegraph(null); + }); + + const port = t.port!; + + // `deploy` must be run outside of the `should` block, + // otherwise this would fail by leaking ops. + // That is expected since it creates new engine that persists beyond the + // `should` block. + await deploy({ port, secrets }); - await gql` - mutation { - createRecord(data: {}) { - id - } + await t.should("insert records", async () => { + const e = t.getTypegraphEngine(tgName); + if (!e) { + throw new Error("typegraph not found"); } - ` - .expectData({ - createRecord: { - id: 1, - }, - }) - .on(e); - }); - - await t.should("load second version of the typegraph", async () => { - await writeTypegraph(1); - }); - try { - await reset(tgName, schema); - await deploy({ port, secrets }); - } catch (e) { - assertStringIncludes( - e.message, - // 'column "age" of relation "Record" contains null values: set a default value:', - 'column "age" of relation "Record" contains null values', - ); - } -}); - -Meta.test({ - name: "meta deploy: succeeds migration for new columns with default value", -}, async (t) => { - const port = t.port!; - const schema = randomSchema(); - const secrets = { - POSTGRES: - `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, - }; - await t.should("load first version of the typegraph", async () => { - await reset(tgName, schema); - await writeTypegraph(null); - }); - - await deploy({ port, secrets }); - - await t.should("insert records", async () => { - const e = t.getTypegraphEngine(tgName)!; - - await gql` - mutation { - createRecord(data: {}) { - id + await gql` + mutation { + createRecord(data: {}) { + id + } } - } - ` - .expectData({ - createRecord: { - id: 1, - }, - }) - .on(e); - }); - - await t.should("load second version of the typegraph", async () => { - await writeTypegraph(3); // int - }); - - await deploy({ port, secrets }); - - await t.should("load third version of the typegraph", async () => { - await writeTypegraph(4); // string - }); - - await deploy({ port, secrets }); -}); - -Meta.test({ - name: "cli:deploy - automatic migrations", - - gitRepo: { - content: { - "prisma.py": "runtimes/prisma/prisma.py", - "metatype.yml": "metatype.yml", - }, + ` + .expectData({ + createRecord: { + id: 1, + }, + }) + .on(e); + }); + + await t.should("load second version of the typegraph", async () => { + await writeTypegraph(1); + }); + + try { + await reset(tgName, schema); + await deploy({ port, secrets }); + } catch (e) { + assertStringIncludes( + e.message, + // 'column "age" of relation "Record" contains null values: set a default value:', + 'column "age" of relation "Record" contains null values', + ); + } }, -}, async (t) => { - const port = t.port!; - const schema = randomSchema(); - const e = await t.engine("prisma.py", { - secrets: { +); + +Meta.test( + { + name: "meta deploy: succeeds migration for new columns with default value", + }, + async (t) => { 
+ const port = t.port!; + const schema = randomSchema(); + const secrets = { POSTGRES: `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + }; + await t.should("load first version of the typegraph", async () => { + await reset(tgName, schema); + await writeTypegraph(null); + }); + + await deploy({ port, secrets }); + + await t.should("insert records", async () => { + const e = t.getTypegraphEngine(tgName)!; + + await gql` + mutation { + createRecord(data: {}) { + id + } + } + ` + .expectData({ + createRecord: { + id: 1, + }, + }) + .on(e); + }); + + await t.should("load second version of the typegraph", async () => { + await writeTypegraph(3); // int + }); + + await deploy({ port, secrets }); + + await t.should("load third version of the typegraph", async () => { + await writeTypegraph(4); // string + }); + + await deploy({ port, secrets }); + }, +); + +Meta.test( + { + name: "cli:deploy - automatic migrations", + + gitRepo: { + content: { + "prisma.py": "runtimes/prisma/prisma.py", + "metatype.yml": "metatype.yml", + }, }, - }); - - await dropSchemas(e); - await removeMigrations(e); - - const nodeConfigs = [ - "--target", - "dev", - "--gate", - `http://localhost:${port}`, - "--secret", - `prisma:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, - ]; - - await t.should("fail to access database", async () => { - await gql` - query { - findManyRecords { - id + }, + async (t) => { + const port = t.port!; + const schema = randomSchema(); + const e = await t.engine("prisma.py", { + secrets: { + POSTGRES: + `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + }, + }); + + await dropSchemas(e); + await removeMigrations(e); + + const nodeConfigs = [ + "--target", + "dev", + "--gate", + `http://localhost:${port}`, + "--secret", + `prisma:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + ]; + + await t.should("fail to access database", async () => { + await gql` + query { + findManyRecords { + id + } } - } - ` - .expectErrorContains(`table \`${schema}.record\` does not exist`) - .on(e); - }); - - await t.should("fail on dirty repo", async () => { - await t.shell(["bash", "-c", "touch README.md"]); - await assertRejects(() => - t.meta(["deploy", ...nodeConfigs, "-f", "prisma.py"]) - ); - }); - - await t.should("commit changes", async () => { - await t.shell(["git", "add", "."]); - await t.shell(["git", "commit", "-m", "create migrations"]); - }); - - // not in t.should because it creates a worker that will not be closed - await t.meta([ - "deploy", - ...nodeConfigs, - "-f", - "prisma.py", - "--create-migration", - ]); - - // TODO: MET-500 - // Does not work with the new version of t.e engine - // await t.should( - // "have replaced and terminated the previous engine", - // async () => { - // await gql` - // query { - // findManyRecords { - // id - // } - // } - // ` - // .expectErrorContains("Could not find engine") - // .on(e); - // }, - // ); - - const e2 = t.getTypegraphEngine("prisma")!; - - await t.should("succeed to query database", async () => { - await gql` - query { - findManyRecords { - id - name + ` + .expectErrorContains(`table \`${schema}.record\` does not exist`) + .on(e); + }); + + await t.should("fail on dirty repo", async () => { + await t.shell(["bash", "-c", "touch README.md"]); + await assertRejects(() => + t.meta(["deploy", ...nodeConfigs, "-f", "prisma.py"]) + ); + }); + + await t.should("commit changes", async () => { + await t.shell(["git", "add", "."]); + await t.shell(["git", 
"commit", "-m", "create migrations"]); + }); + + // not in t.should because it creates a worker that will not be closed + await t.meta([ + "deploy", + ...nodeConfigs, + "-f", + "prisma.py", + "--create-migration", + ]); + + // TODO: MET-500 + // Does not work with the new version of t.e engine + // await t.should( + // "have replaced and terminated the previous engine", + // async () => { + // await gql` + // query { + // findManyRecords { + // id + // } + // } + // ` + // .expectErrorContains("Could not find engine") + // .on(e); + // }, + // ); + + const e2 = t.getTypegraphEngine("prisma")!; + + await t.should("succeed to query database", async () => { + await gql` + query { + findManyRecords { + id + name + } } - } - ` - .expectData({ - findManyRecords: [], - }) - .on(e2); - }); -}); - -Meta.test({ - name: "cli:deploy - with prefix", - - gitRepo: { - content: { - "prisma.py": "runtimes/prisma/prisma.py", - "metatype.yml": "metatype.yml", - }, + ` + .expectData({ + findManyRecords: [], + }) + .on(e2); + }); }, -}, async (t) => { - const schema = randomSchema(); - const e = await t.engine("prisma.py", { - secrets: { - POSTGRES: - `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, +); + +Meta.test( + { + name: "cli:deploy - with prefix", + + gitRepo: { + content: { + "prisma.py": "runtimes/prisma/prisma.py", + "metatype.yml": "metatype.yml", + }, }, - prefix: "pref-", - }); - - await dropSchemas(e); - await removeMigrations(e); - - const nodeConfigs = [ - "-t", - "with_prefix", - "--gate", - `http://localhost:${t.port}`, - "--secret", - `prisma:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, - ]; - - await t.should("fail to access database", async () => { - await gql` - query { - findManyRecords { - id - } - } - ` - .expectErrorContains(`table \`${schema}.record\` does not exist`) - .on(e); - }); - - // not in t.should because it creates a worker that will not be closed - await t.meta([ - "deploy", - ...nodeConfigs, - "-f", - "prisma.py", - "--create-migration", - ]); - - // TODO: MET-500 - // Does not work with the new version of t.e engine - // await t.should( - // "succeed have replaced and terminated the previous engine", - // async () => { - // await gql` - // query { - // findManyRecords { - // id - // } - // } - // ` - // .expectErrorContains("Could not find engine") - // .on(e); - // }, - // ); - - const e2 = t.getTypegraphEngine("pref-prisma")!; - - await t.should("succeed to query database", async () => { - await gql` - query { - findManyRecords { - id - name + }, + async (t) => { + const schema = randomSchema(); + const e = await t.engine("prisma.py", { + secrets: { + POSTGRES: + `postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + }, + prefix: "pref-", + }); + + await dropSchemas(e); + await removeMigrations(e); + + const nodeConfigs = [ + "-t", + "with_prefix", + "--gate", + `http://localhost:${t.port}`, + "--secret", + `prisma:POSTGRES=postgresql://postgres:password@localhost:5432/db?schema=${schema}`, + ]; + + await t.should("fail to access database", async () => { + await gql` + query { + findManyRecords { + id + } } - } - ` - .expectData({ - findManyRecords: [], - }) - .on(e2); - }); -}); + ` + .expectErrorContains(`table \`${schema}.record\` does not exist`) + .on(e); + }); + + // not in t.should because it creates a worker that will not be closed + await t.meta([ + "deploy", + ...nodeConfigs, + "-f", + "prisma.py", + "--create-migration", + ]); + + // TODO: MET-500 + // Does not work with the new version of 
t.e engine + // await t.should( + // "succeed have replaced and terminated the previous engine", + // async () => { + // await gql` + // query { + // findManyRecords { + // id + // } + // } + // ` + // .expectErrorContains("Could not find engine") + // .on(e); + // }, + // ); + + // const e2 = t.getTypegraphEngine("pref-prisma")!; + + // await t.should("succeed to query database", async () => { + // await gql` + // query { + // findManyRecords { + // id + // name + // } + // } + // ` + // .expectData({ + // findManyRecords: [], + // }) + // .on(e2); + // }); + }, +); diff --git a/typegate/tests/e2e/cli/dev_test.ts b/typegate/tests/e2e/cli/dev_test.ts index e0262b5709..b2ddc4f8fb 100644 --- a/typegate/tests/e2e/cli/dev_test.ts +++ b/typegate/tests/e2e/cli/dev_test.ts @@ -31,15 +31,17 @@ async function writeTypegraph(version: number | null, target = "migration.py") { } } -Meta.test.only( +Meta.test( { name: "meta dev: choose to reset the database", - gitRepo: { content: { "metatype.yml": "metatype.yml", }, }, + // // TODO remove this + // sanitizeResources: false, + // sanitizeOps: false, }, async (t) => { const schema = randomSchema(); @@ -62,7 +64,7 @@ Meta.test.only( }); await metadev.fetchStderrLines((line) => { - console.log("line:", line); + // console.log("line:", line); return !$.stripAnsi(line).includes( "successfully deployed typegraph migration-failure-test from migration.py", ); @@ -91,38 +93,38 @@ Meta.test.only( await t.should("load second version of the typegraph", async () => { await writeTypegraph(1, tgDefPath); await metadev.fetchStderrLines((line) => { - console.log("line:", line); + // console.log("line:", line); return !line.includes("[select]"); }); await metadev.writeLine("3"); }); - // await metadev.fetchStderrLines((line) => { - // console.log("line:", line); - // return !$.stripAnsi(line).includes( - // "Successfully pushed typegraph migration-failure-test", - // ); - // }); - - // await t.should("database be empty", async () => { - // const e = t.getTypegraphEngine(tgName); - // if (!e) { - // throw new Error("typegraph not found"); - // } - // await gql` - // query { - // findRecords { - // id - // age - // } - // } - // ` - // .expectData({ - // findRecords: [], - // }) - // .on(e); - // }); + await metadev.fetchStderrLines((line) => { + console.log("line:", line); + return !$.stripAnsi(line).includes( + "successfully deployed typegraph migration-failure-test", + ); + }); + + await t.should("database be empty", async () => { + const e = t.getTypegraphEngine(tgName); + if (!e) { + throw new Error("typegraph not found"); + } + await gql` + query { + findRecords { + id + age + } + } + ` + .expectData({ + findRecords: [], + }) + .on(e); + }); await metadev.close(); }, @@ -141,7 +143,6 @@ async function listSubdirs(path: string): Promise { Meta.test( { name: "meta dev: remove latest migration", - gitRepo: { content: { "metatype.yml": "metatype.yml", @@ -174,9 +175,9 @@ Meta.test( }); await metadev.fetchStderrLines((line) => { - console.log("line:", line); + // console.log("line:", line); return !$.stripAnsi(line).includes( - "Successfully pushed typegraph migration-failure-test", + "successfully deployed typegraph migration-failure-test", ); }); @@ -214,7 +215,7 @@ Meta.test( await t.should("load second version of the typegraph", async () => { await writeTypegraph(1, tgDefFile); await metadev.fetchStderrLines((line) => { - console.log("line:", line); + // console.log("line:", line); return !line.includes("[select]"); }); @@ -224,7 +225,7 @@ Meta.test( }); await 
metadev.fetchStderrLines((line) => { - console.log("line:", line); + // console.log("line:", line); return !line.includes("Removed migration directory"); }); diff --git a/typegraph/node/sdk/src/envs/cli.ts b/typegraph/node/sdk/src/envs/cli.ts index e19fedc346..e2c3d7f9e8 100644 --- a/typegraph/node/sdk/src/envs/cli.ts +++ b/typegraph/node/sdk/src/envs/cli.ts @@ -52,7 +52,7 @@ export function loadCliEnv(): CliEnv | null { if (envValue === "all") { record[key] = null; } else { - const prefix = "typegraph="; + const prefix = "typegraphs="; if (!envValue.startsWith(prefix)) { throw new Error(`invalid ${name} env value: ${envValue}`); } else { diff --git a/typegraph/python/typegraph/envs/cli.py b/typegraph/python/typegraph/envs/cli.py index 03f7254e94..9bacaaf611 100644 --- a/typegraph/python/typegraph/envs/cli.py +++ b/typegraph/python/typegraph/envs/cli.py @@ -5,6 +5,7 @@ from typing import Optional, List from os import environ from enum import Enum +from typegraph.io import Log _required_cli_envs = ( "version", @@ -68,9 +69,11 @@ def load(cls) -> Optional["CliEnv"]: if raw_filter == "all": filter = None else: - if not raw_filter.startswith("typegraph="): + prefix = "typegraphs=" + if not raw_filter.startswith(prefix): raise Exception(f"invalid MCLI_FILTER env value: {raw_filter}") - filter = raw_filter.removeprefix("typegraph=").split(",") + Log.debug("raw_filter", raw_filter) + filter = raw_filter[len(prefix) :].split(",") d["filter"] = filter return cls(**d) diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index 61b4f348d5..a0d5b3d126 100644 --- a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MPL-2.0 import traceback +from pathlib import Path from typegraph.gen.exports.core import ( FinalizeParams, @@ -35,6 +36,9 @@ def run(self): else: raise Exception("unreachable") + def get_migration_dir(self): + return str(Path(self.env.migrations_dir) / self.typegraph.name) + def serialize(self): env = self.env params = FinalizeParams( @@ -43,7 +47,7 @@ def serialize(self): artifact_resolution=True, codegen=False, prisma_migration=PrismaMigrationConfig( - migrations_dir=env.migrations_dir, + migrations_dir=self.get_migration_dir(), migration_actions=[], default_migration_action=MigrationAction( apply=True, @@ -70,7 +74,7 @@ def deploy(self): artifact_resolution=True, codegen=False, prisma_migration=PrismaMigrationConfig( - migrations_dir=env.migrations_dir, + migrations_dir=self.get_migration_dir(), migration_actions=list(deploy_data.migration_actions.items()), default_migration_action=deploy_data.default_migration_action, ), @@ -98,7 +102,7 @@ def deploy(self): typegraph_path=env.typegraph_path, prefix=env.prefix, secrets=deploy_data.secrets, - migrations_dir=env.migrations_dir, + migrations_dir=self.get_migration_dir(), migration_actions=deploy_data.migration_actions, default_migration_action=deploy_data.default_migration_action, ) From ea2e4e834ff77d64ada53f485cacdab632e72e16 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 15:44:34 +0300 Subject: [PATCH 16/35] cleanup, remove server store --- meta-cli/src/cli/completion.rs | 3 +- meta-cli/src/cli/deploy.rs | 34 +--- meta-cli/src/cli/dev.rs | 5 +- meta-cli/src/cli/doctor.rs | 15 +- meta-cli/src/cli/gen.rs | 15 +- meta-cli/src/cli/mod.rs | 3 +- meta-cli/src/cli/new.rs | 3 +- meta-cli/src/cli/serialize.rs | 11 +- meta-cli/src/cli/typegate.rs | 3 +- meta-cli/src/cli/undeploy.rs | 3 +- 
meta-cli/src/cli/upgrade.rs | 3 +- meta-cli/src/com/mod.rs | 1 - meta-cli/src/com/responses.rs | 74 ------- meta-cli/src/com/server.rs | 185 ------------------ meta-cli/src/deploy/actors/console.rs | 6 +- meta-cli/src/deploy/actors/task.rs | 4 +- meta-cli/src/deploy/actors/task/deploy.rs | 2 +- .../deploy/actors/task/deploy/migrations.rs | 58 +----- meta-cli/src/deploy/actors/task_io.rs | 1 - meta-cli/src/deploy/actors/task_manager.rs | 42 ++-- meta-cli/src/deploy/push/pusher.rs | 11 -- meta-cli/src/main.rs | 20 +- meta-cli/src/typegraph/loader/discovery.rs | 10 +- 23 files changed, 54 insertions(+), 458 deletions(-) delete mode 100644 meta-cli/src/com/server.rs diff --git a/meta-cli/src/cli/completion.rs b/meta-cli/src/cli/completion.rs index e4f0a22106..c3b03dc92b 100644 --- a/meta-cli/src/cli/completion.rs +++ b/meta-cli/src/cli/completion.rs @@ -3,7 +3,6 @@ use crate::interlude::*; -use actix_web::dev::ServerHandle; use clap::CommandFactory; use clap::Parser; use clap::ValueEnum; @@ -24,7 +23,7 @@ pub struct Completion { #[async_trait] impl Action for Completion { #[tracing::instrument] - async fn run(&self, _args: ConfigArgs, _: Option) -> Result<()> { + async fn run(&self, _args: ConfigArgs) -> Result<()> { let mut cmd = Args::command(); let name = cmd.get_name().to_string(); match self.shell.or_else(Shell::from_env) { diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index cf7a6cd76e..0257e2431a 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -4,15 +4,11 @@ use self::actors::task::deploy::{DeployAction, DeployActionGenerator}; use self::actors::task_manager::{self, StopReason}; use super::{Action, ConfigArgs, NodeArgs}; -use crate::com::store::{Command, Endpoint, ServerStore}; use crate::config::Config; use crate::deploy::actors; use crate::deploy::actors::console::ConsoleActor; -use crate::deploy::actors::task_manager::TaskManager; -use crate::deploy::actors::watcher::{self, WatcherActor}; use crate::interlude::*; use crate::secrets::{RawSecrets, Secrets}; -use actix_web::dev::ServerHandle; use clap::Parser; use common::node::Node; use owo_colors::OwoColorize; @@ -119,25 +115,7 @@ impl Deploy { .await .context("error while building node from config")?; - ServerStore::with(Some(Command::Deploy), Some(config.as_ref().to_owned())); - // ServerStore::set_migration_action_glob(MigrationAction { - // create: deploy.options.create_migration, - // reset: deploy.options.allow_destructive, // reset on drift - // }); - ServerStore::set_endpoint(Endpoint { - typegate: node.base_url.clone().into(), - auth: node.auth.clone(), - }); - ServerStore::set_prefix(node_config.prefix); - ServerStore::set_codegen_flag(deploy.options.codegen); - let file = deploy.file.clone(); - // let file = deploy - // .file - // .as_ref() - // .map(|f| f.normalize()) - // .transpose()? 
- // .map(|f| f.into_path_buf()); if let Some(file) = &file { if let Err(err) = crate::config::ModuleType::try_from(file.as_path()) { bail!("file is not a valid module type: {err:#}") @@ -155,15 +133,10 @@ impl Deploy { } } -struct CtrlCHandlerData { - watcher: Addr>, - task_manager: Addr>, -} - #[async_trait] impl Action for DeploySubcommand { #[tracing::instrument(level = "debug")] - async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()> { + async fn run(&self, args: ConfigArgs) -> Result<()> { let deploy = Deploy::new(self, &args).await?; if !self.options.allow_dirty { @@ -196,7 +169,6 @@ impl Action for DeploySubcommand { trace!("running in default mode"); // deploy a single file let status = default_mode::run(deploy).await?; - server_handle.unwrap().stop(true).await; status }; @@ -321,10 +293,6 @@ mod watch_mode { deploy.options.allow_destructive, ); - // ServerStore::set_secrets(secrets.hydrate(deploy.base_dir.clone()).await?); - - // let (loader_event_tx, loader_event_rx) = mpsc::unbounded_channel(); - let mut init = TaskManagerInit::::new( deploy.config.clone(), action_generator.clone(), diff --git a/meta-cli/src/cli/dev.rs b/meta-cli/src/cli/dev.rs index 7001d203b5..f5468fb687 100644 --- a/meta-cli/src/cli/dev.rs +++ b/meta-cli/src/cli/dev.rs @@ -7,7 +7,6 @@ use super::deploy::DeploySubcommand; use super::Action; use super::ConfigArgs; use super::NodeArgs; -use actix_web::dev::ServerHandle; use clap::Parser; #[derive(Parser, Debug)] @@ -33,7 +32,7 @@ pub struct Dev { #[async_trait] impl Action for Dev { #[tracing::instrument] - async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()> { + async fn run(&self, args: ConfigArgs) -> Result<()> { log::info!("'meta dev' subcommand is an alias to 'meta deploy --codegen --allow-dirty --watch --create-migration'"); let options = DeployOptions { codegen: true, @@ -52,6 +51,6 @@ impl Action for Dev { None, self.max_parallel_loads, ); - deploy.run(args, server_handle).await + deploy.run(args).await } } diff --git a/meta-cli/src/cli/doctor.rs b/meta-cli/src/cli/doctor.rs index 30a24f6ac4..b40ee8cff4 100644 --- a/meta-cli/src/cli/doctor.rs +++ b/meta-cli/src/cli/doctor.rs @@ -4,17 +4,12 @@ use crate::{interlude::*, typegraph::loader::discovery::Discovery}; use super::{Action, ConfigArgs}; -use crate::{ - cli::ui, - config::{Config, PIPFILE_FILES, PYPROJECT_FILES, REQUIREMENTS_FILES, VENV_FOLDERS}, - fs::{clean_path, find_in_parents}, - global_config::GlobalConfig, -}; - -use actix_web::dev::ServerHandle; +use crate::cli::ui; +use crate::config::{Config, PIPFILE_FILES, PYPROJECT_FILES, REQUIREMENTS_FILES, VENV_FOLDERS}; +use crate::fs::{clean_path, find_in_parents}; +use crate::global_config::GlobalConfig; use clap::Parser; use owo_colors::OwoColorize; - use std::process::Command; #[derive(Parser, Debug)] @@ -37,7 +32,7 @@ fn shell(cmds: Vec<&str>) -> Result { #[async_trait] impl Action for Doctor { #[tracing::instrument] - async fn run(&self, args: ConfigArgs, _: Option) -> Result<()> { + async fn run(&self, args: ConfigArgs) -> Result<()> { let dir = &args.dir(); let w = 60; diff --git a/meta-cli/src/cli/gen.rs b/meta-cli/src/cli/gen.rs index 1681931430..371ea22ddb 100644 --- a/meta-cli/src/cli/gen.rs +++ b/meta-cli/src/cli/gen.rs @@ -8,7 +8,6 @@ use crate::deploy::actors::task_manager::{TaskManagerInit, TaskSource}; use crate::interlude::*; use crate::{com::store::ServerStore, config::Config, deploy::actors::console::ConsoleActor}; use actix::Actor; -use actix_web::dev::ServerHandle; use clap::{Parser, 
ValueEnum}; use common::typegraph::Typegraph; use metagen::*; @@ -58,7 +57,7 @@ pub struct Gen { #[async_trait] impl Action for Gen { #[tracing::instrument] - async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()> { + async fn run(&self, args: ConfigArgs) -> Result<()> { let dir = args.dir(); let mut config = Config::load_or_find(args.config, &dir)?; @@ -99,11 +98,11 @@ impl Action for Gen { }) .await?; - let responses = ServerStore::get_responses(file) - .context("invalid state, no response received")?; - for (_, res) in responses.iter() { - res.codegen()? - } + // let responses = ServerStore::get_responses(file) + // .context("invalid state, no response received")?; + // for (_, res) in responses.iter() { + // res.codegen()? + // } } GeneratorOp::Mdk => { let files = metagen::generate_target( @@ -129,8 +128,6 @@ impl Action for Gen { } }; - server_handle.unwrap().stop(true).await; - Ok(()) } } diff --git a/meta-cli/src/cli/mod.rs b/meta-cli/src/cli/mod.rs index c97458b4c8..7421392bd0 100644 --- a/meta-cli/src/cli/mod.rs +++ b/meta-cli/src/cli/mod.rs @@ -4,7 +4,6 @@ use crate::interlude::*; use crate::utils::clap::UrlValueParser; -use actix_web::dev::ServerHandle; use clap::Parser; use clap::Subcommand; use clap_verbosity_flag::Verbosity; @@ -86,7 +85,7 @@ pub(crate) enum Commands { #[async_trait] #[enum_dispatch(Commands)] pub trait Action { - async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()>; + async fn run(&self, args: ConfigArgs) -> Result<()>; } #[derive(Parser, Debug, Clone)] diff --git a/meta-cli/src/cli/new.rs b/meta-cli/src/cli/new.rs index b8e99a087c..3e4738d803 100644 --- a/meta-cli/src/cli/new.rs +++ b/meta-cli/src/cli/new.rs @@ -4,7 +4,6 @@ use crate::interlude::*; use super::{Action, ConfigArgs}; -use actix_web::dev::ServerHandle; use async_trait::async_trait; use clap::{Parser, ValueEnum}; use include_dir::{include_dir, Dir}; @@ -44,7 +43,7 @@ pub struct New { #[async_trait] impl Action for New { #[tracing::instrument] - async fn run(&self, args: ConfigArgs, _: Option) -> Result<()> { + async fn run(&self, args: ConfigArgs) -> Result<()> { let dir = PathBuf::from(&self.dir); let target_dir = if dir.is_absolute() { dir diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index 3fe089937a..181b00bc1c 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -1,7 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 + use super::{Action, ConfigArgs}; -use crate::com::store::{Command, ServerStore}; use crate::config::{Config, PathOption}; use crate::deploy::actors::console::ConsoleActor; use crate::deploy::actors::task::serialize::{ @@ -10,7 +10,6 @@ use crate::deploy::actors::task::serialize::{ use crate::deploy::actors::task::TaskFinishStatus; use crate::deploy::actors::task_manager::{Report, StopReason, TaskManagerInit, TaskSource}; use crate::interlude::*; -use actix_web::dev::ServerHandle; use clap::Parser; use common::typegraph::Typegraph; use core::fmt::Debug; @@ -53,16 +52,12 @@ pub struct Serialize { #[async_trait] impl Action for Serialize { #[tracing::instrument] - async fn run(&self, args: ConfigArgs, server_handle: Option) -> Result<()> { + async fn run(&self, args: ConfigArgs) -> Result<()> { let dir = args.dir(); let config_path = args.config.clone(); let config = Config::load_or_find(config_path, &dir)?; - // Minimum setup - ServerStore::with(Some(Command::Serialize), Some(config.to_owned())); - ServerStore::set_prefix(self.prefix.to_owned()); - let config = Arc::new(config); let console = ConsoleActor::new(Arc::clone(&config)).start(); @@ -125,8 +120,6 @@ impl Action for Serialize { self.write(&self.to_string(&tgs)?).await?; } - server_handle.unwrap().stop(true).await; - Ok(()) } } diff --git a/meta-cli/src/cli/typegate.rs b/meta-cli/src/cli/typegate.rs index 31c7cd7fb6..1ac69711a4 100644 --- a/meta-cli/src/cli/typegate.rs +++ b/meta-cli/src/cli/typegate.rs @@ -2,7 +2,6 @@ // SPDX-License-Identifier: MPL-2.0 use crate::interlude::*; -use actix_web::dev::ServerHandle; use clap::Parser; use crate::cli::{Action, ConfigArgs}; @@ -19,7 +18,7 @@ pub struct Typegate { #[async_trait] impl Action for Typegate { - async fn run(&self, _gen_args: ConfigArgs, _: Option) -> Result<()> { + async fn run(&self, _gen_args: ConfigArgs) -> Result<()> { unreachable!() } } diff --git a/meta-cli/src/cli/undeploy.rs b/meta-cli/src/cli/undeploy.rs index 4d321e7248..354f70f16e 100644 --- a/meta-cli/src/cli/undeploy.rs +++ b/meta-cli/src/cli/undeploy.rs @@ -4,7 +4,6 @@ use crate::interlude::*; use crate::config::Config; -use actix_web::dev::ServerHandle; use clap::Parser; use super::{Action, NodeArgs}; @@ -25,7 +24,7 @@ pub struct Undeploy { #[async_trait] impl Action for Undeploy { #[tracing::instrument] - async fn run(&self, args: super::ConfigArgs, _: Option) -> Result<()> { + async fn run(&self, args: super::ConfigArgs) -> Result<()> { let dir = args.dir(); let config_path = args.config.clone(); let config = Config::load_or_find(config_path, &dir)?; diff --git a/meta-cli/src/cli/upgrade.rs b/meta-cli/src/cli/upgrade.rs index 0c80366a84..cfa19bcbad 100644 --- a/meta-cli/src/cli/upgrade.rs +++ b/meta-cli/src/cli/upgrade.rs @@ -7,7 +7,6 @@ use crate::global_config::GlobalConfig; use super::{Action, ConfigArgs}; use crate::build; -use actix_web::dev::ServerHandle; use chrono::{Duration, Utc}; use clap::Parser; use self_update::{backends::github::Update, update::UpdateStatus}; @@ -27,7 +26,7 @@ pub struct Upgrade { #[async_trait] impl Action for Upgrade { #[tracing::instrument] - async fn run(&self, _args: ConfigArgs, _: Option) -> Result<()> { + async fn run(&self, _args: ConfigArgs) -> Result<()> { // https://github.com/jaemk/self_update/issues/44 let opts = self.clone(); tokio::task::spawn_blocking(move || { diff --git a/meta-cli/src/com/mod.rs b/meta-cli/src/com/mod.rs index a8a3f7d423..d0fd96a3c8 100644 --- a/meta-cli/src/com/mod.rs +++ b/meta-cli/src/com/mod.rs @@ -2,5 
+2,4 @@ // SPDX-License-Identifier: MPL-2.0 pub mod responses; -pub mod server; pub mod store; diff --git a/meta-cli/src/com/responses.rs b/meta-cli/src/com/responses.rs index 1ae9d50f1e..71188ab44c 100644 --- a/meta-cli/src/com/responses.rs +++ b/meta-cli/src/com/responses.rs @@ -3,8 +3,6 @@ use crate::interlude::*; use super::store::Command; -use crate::{codegen::deno::Codegen, deploy::push::pusher::PushResultRaw}; -use common::typegraph::Typegraph; use serde_json::Value; // CLI => SDK @@ -40,75 +38,3 @@ pub struct SDKError { #[allow(unused)] value: serde_json::Value, } - -impl SDKResponse { - pub fn validate(&self) -> Result<()> { - if self.data.is_none() && self.error.is_none() { - // This should never happen - // maybe use panic instead? - bail!( - "typegraph {:?} provided an invalid response, data and error fields are both undefined", - self.typegraph_name - ); - } - - if let Some(error) = self.error.clone() { - let err: SDKError = serde_json::from_value(error)?; - bail!( - "SDK {} error: {}", - err.code.strip_suffix("_err").unwrap_or(&err.code), - err.msg - ); - } - - Ok(()) - } - - pub fn as_typegraph(&self) -> Result { - self.validate()?; - let value = self.data.to_owned().unwrap(); - serde_json::from_value(value).map_err(|e| e.into()) - } - - pub fn as_push_result(&self) -> Result { - self.validate()?; - let response: common::graphql::Response = - serde_json::from_value(self.data.clone().unwrap())?; - if let Some(errors) = response.errors { - if errors.len() == 1 { - bail!( - "error response when pushing to typegate: {}", - errors[0].message - ) - } else { - let mut err = ferr!("error responses when pushing to typegate"); - for error in errors { - err = err.section(error.message); - } - return Err(err); - } - } - let field = "addTypegraph"; - let Some(data) = &response.data else { - bail!("unexpected response when pushing to typegate: has no field 'data'") - }; - let value = &data[field]; - if value.is_null() { - bail!("unexpected response when pushing to typegate: has no field 'data.{field}'") - } - Ok(serde_json::from_value(value.clone())?) - } - - pub fn typegraph_dir(&self) -> PathBuf { - let mut ret = self.typegraph_path.clone(); - ret.pop(); // pop file.ext - ret - } - - // TODO: rm once MET-492 lands - pub fn codegen(&self) -> Result<()> { - let tg = self.as_typegraph()?; - let path = self.typegraph_path.clone(); - Codegen::new(&tg, &path).apply_codegen() - } -} diff --git a/meta-cli/src/com/server.rs b/meta-cli/src/com/server.rs deleted file mode 100644 index d4b464f5cc..0000000000 --- a/meta-cli/src/com/server.rs +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - -use crate::com::{ - responses::{CLIResponseError, CLIResponseSuccess, SDKResponse}, - store::ServerStore, -}; -use actix_web::{ - dev::Server, - get, post, - web::{PayloadConfig, Query}, - App, HttpRequest, HttpResponse, HttpServer, Responder, -}; -use lazy_static::lazy_static; -use reqwest::StatusCode; -use serde::Deserialize; -use serde_json::json; -use std::{ - io::{Error, ErrorKind}, - net::{Ipv4Addr, SocketAddrV4, TcpListener}, -}; - -pub struct PortManager { - pub tcp_listener: Arc, -} - -impl PortManager { - pub fn new() -> Self { - let addr = SocketAddrV4::new(Ipv4Addr::UNSPECIFIED, 0); - Self { - tcp_listener: Arc::new(TcpListener::bind(addr).unwrap()), - } - } -} - -lazy_static! 
{ - pub static ref PORT_MAN: Arc = Arc::new(PortManager::new()); -} - -pub fn get_instance_port() -> u16 { - PORT_MAN.tcp_listener.local_addr().unwrap().port() -} - -#[derive(Debug, Deserialize)] -struct QueryConfigParams { - typegraph: String, - typegraph_path: PathBuf, -} - -#[get("/config")] -#[tracing::instrument(level = "debug", ret)] -async fn config(req: HttpRequest) -> impl Responder { - let parsed = Query::::from_query(req.query_string()).unwrap_or_log(); - - let mut artefact_base_dir = parsed.typegraph_path.clone(); - artefact_base_dir.pop(); // pop file.ext - - let endpoint = ServerStore::get_endpoint(); - let secrets = ServerStore::get_secrets(&parsed.typegraph); - let migration_action_glob = ServerStore::get_migration_action_glob(); - let disable_artifact_resolution = !ServerStore::get_artifact_resolution_flag(); - let codegen = ServerStore::get_codegen_flag(); - - let mut migration_action_per_rt = vec![]; - if let Some(per_rt_actions) = - ServerStore::get_per_runtime_migration_action(&parsed.typegraph_path) - { - migration_action_per_rt = - per_rt_actions - .iter() - .fold(migration_action_per_rt, |mut acc, local_cfg| { - acc.push(json!([ - local_cfg.runtime_name.clone(), - local_cfg.action.clone() - ])); - acc - }); - } - - let prefix = ServerStore::get_prefix(); - match ServerStore::get_config() { - Some(config) => { - let data = json!({ - "typegate": { - "endpoint": endpoint.typegate, - "auth": endpoint.auth - }, - "secrets": secrets, - "prefix": prefix, - "artifactsConfig": { - // on sdk's side, cwd will match to the parent process (cli) - // thus `dir` must be explicitly set to the canonical typegraph's `workdir` - "dir": artefact_base_dir, - "prismaMigration": { - // only the cli is aware of the convention migrationDir := tg_workdir + folder from config + tg_name - "migrationDir": config.prisma_migrations_dir_rel(&parsed.typegraph), - "globalAction": migration_action_glob, - "runtimeAction": migration_action_per_rt - }, - "disableArtifactResolution": disable_artifact_resolution, - "codegen": codegen - }, - }); - - HttpResponse::Ok() - .status(StatusCode::OK) - .json(CLIResponseSuccess { data }) - } - None => HttpResponse::Ok() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .json(CLIResponseError { - error: "Could not get config from meta-cli".to_string(), - }), - } -} - -#[get("/command")] -#[tracing::instrument(level = "debug", ret)] -async fn command() -> impl Responder { - match ServerStore::get_command() { - Some(command) => HttpResponse::Ok() - .status(StatusCode::OK) - .json(CLIResponseSuccess { - data: serde_json::to_value(command).unwrap(), - }), - None => HttpResponse::Ok() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .json(CLIResponseError { - error: "Could not get command from meta-cli".to_string(), - }), - } -} - -#[post("/response")] -#[tracing::instrument(level = "debug", ret)] -async fn response(req_body: String) -> impl Responder { - let sdk_response: SDKResponse = serde_json::from_str(&req_body).unwrap(); - - match &sdk_response.command { - super::store::Command::Codegen => { - if let Err(e) = sdk_response.codegen() { - return HttpResponse::Ok() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .json(CLIResponseError { - error: e.to_string(), - }); - } - } - _ => { - // to be used later - ServerStore::add_response(sdk_response.clone()); - } - }; - - HttpResponse::Ok() - .status(StatusCode::OK) - .json(CLIResponseSuccess { - data: serde_json::to_value("ok").unwrap(), - }) -} - -pub fn init_server() -> std::io::Result { - let port = get_instance_port(); - 
- let tcp_listener = PORT_MAN - .tcp_listener - .try_clone() - .map_err(|e| Error::new(ErrorKind::AddrNotAvailable, e.to_string()))?; - - log::trace!("CLI server is listening at http://localhost:{port}"); - - let server = HttpServer::new(|| { - App::new() - .service(config) - .service(command) - .service(response) - .app_data(PayloadConfig::new(1_000_000 * 100)) // mb - }) - .listen(tcp_listener)? - .workers(1) - .run(); - - Ok(server) -} diff --git a/meta-cli/src/deploy/actors/console.rs b/meta-cli/src/deploy/actors/console.rs index 76eb459ba1..0ae07b9793 100644 --- a/meta-cli/src/deploy/actors/console.rs +++ b/meta-cli/src/deploy/actors/console.rs @@ -3,15 +3,11 @@ pub mod input; +use crate::config::Config; use crate::interlude::*; - use std::io::BufRead; - -use actix::prelude::*; use tokio::sync::oneshot; -use crate::config::Config; - enum Mode { Input { output_buffer: Vec>, diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 1429dc42ad..8e4827a08f 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -339,7 +339,7 @@ impl Handler> for TaskActor { impl Handler> for TaskActor { type Result = (); - fn handle(&mut self, UpdateResults(results): UpdateResults, ctx: &mut Context) { + fn handle(&mut self, UpdateResults(results): UpdateResults, _ctx: &mut Context) { for result in results.into_iter() { let tg_name = get_typegraph_name::(&result); self.results.insert(tg_name, result); @@ -366,7 +366,7 @@ impl Handler> for TaskActor { impl Handler for TaskActor { type Result = (); - fn handle(&mut self, _msg: Stop, ctx: &mut Context) -> Self::Result { + fn handle(&mut self, _msg: Stop, _ctx: &mut Context) -> Self::Result { let path = self.get_path_owned(); if let Some(process) = &mut self.process { self.console.warning(format!("killing task for {:?}", path)); diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index 2c15348667..674658ca2c 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -306,7 +306,7 @@ impl TaskAction for DeployAction { } impl MigrationAction { - fn apply_override(mut self, action_override: &MigrationActionOverride) -> Self { + fn apply_override(self, action_override: &MigrationActionOverride) -> Self { match action_override { MigrationActionOverride::ResetDatabase => MigrationAction { reset: true, diff --git a/meta-cli/src/deploy/actors/task/deploy/migrations.rs b/meta-cli/src/deploy/actors/task/deploy/migrations.rs index f9bd12f0c3..19f86bcad8 100644 --- a/meta-cli/src/deploy/actors/task/deploy/migrations.rs +++ b/meta-cli/src/deploy/actors/task/deploy/migrations.rs @@ -3,13 +3,10 @@ use color_eyre::owo_colors::OwoColorize; -use super::{ - DeployAction, DeployActionInner, DeployOptions, Migration, MigrationActionOverride, - PrismaRuntimeId, -}; -use crate::deploy::actors::console::input::{Confirm, Select, SelectOption}; +use super::{DeployAction, DeployActionInner, Migration, MigrationActionOverride}; +use crate::deploy::actors::console::input::{Confirm, Select}; use crate::deploy::actors::console::Console; -use crate::deploy::actors::task::action::{ActionFinalizeContext, TaskFilter}; +use crate::deploy::actors::task::action::ActionFinalizeContext; use crate::deploy::actors::task::TaskActor; use crate::interlude::*; @@ -204,55 +201,6 @@ impl DeployActionInner { } } -pub mod message { - use super::*; - - #[derive(Message)] - #[rtype(result = "()")] - pub(super) struct ConfirmDatabaseReset { - pub 
typegraph: String, - pub runtime: String, - pub message: String, - } - - #[derive(Message)] - #[rtype(result = "()")] - pub(super) struct ResetDatabase { - pub typegraph: String, - pub runtime: String, - } - - #[derive(Message)] - #[rtype(result = "()")] - pub(super) struct ResolveConstraintViolation { - pub typegraph: String, - pub runtime: String, - pub column: String, - pub migration: String, - pub is_new_column: bool, - pub table: String, - pub message: String, - } - - #[derive(Message)] - #[rtype(result = "()")] - pub(super) struct RemoveLatestMigration { - pub typegraph: String, - pub runtime: String, - pub migration: String, - } - - #[derive(Message)] - #[rtype(result = "()")] - pub(super) struct WaitForManualResolution { - pub typegraph: String, - pub runtime: String, - pub migration: String, - } -} - -use message::*; - #[derive(Debug)] pub struct ConfirmDatabaseResetRequired { pub task: Addr>, diff --git a/meta-cli/src/deploy/actors/task_io.rs b/meta-cli/src/deploy/actors/task_io.rs index 2d8e2021f9..a7d9aec794 100644 --- a/meta-cli/src/deploy/actors/task_io.rs +++ b/meta-cli/src/deploy/actors/task_io.rs @@ -256,7 +256,6 @@ impl TaskIoActor { let scope = self.get_console_scope(); let fut = async move { - let id = req.id; match action.get_rpc_response(&rpc_call).await { Ok(response) => { self_addr.do_send(message::SendRpcResponse(req.response(response))); diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index f4b8d292e6..d1eda2f051 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -1,26 +1,16 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use std::{ - collections::{HashSet, VecDeque}, - sync::atomic::{AtomicUsize, Ordering}, -}; - +use super::console::{Console, ConsoleActor}; +use super::discovery::DiscoveryActor; +use super::task::action::{TaskAction, TaskActionGenerator}; +use super::task::{self, TaskActor, TaskFinishStatus}; +use super::watcher::WatcherActor; +use crate::{config::Config, interlude::*}; use futures::channel::oneshot; use indexmap::IndexMap; - -use crate::{config::Config, interlude::*}; - -use super::{ - console::{Console, ConsoleActor}, - discovery::DiscoveryActor, - task::{ - self, - action::{TaskAction, TaskActionGenerator}, - TaskActor, TaskFinishStatus, - }, - watcher::WatcherActor, -}; +use std::collections::VecDeque; +use std::sync::atomic::{AtomicUsize, Ordering}; pub mod report; pub use report::Report; @@ -97,7 +87,7 @@ impl TaskGenerator { } #[derive(Clone, Copy, Debug, PartialEq, Eq)] -struct TaskId(usize); +pub struct TaskId(usize); #[derive(Clone, Debug)] pub struct TaskRef { @@ -106,10 +96,10 @@ pub struct TaskRef { pub retry_no: usize, } -enum RetryStatus { - Pending, - Cancelled, -} +// enum RetryStatus { +// Pending, +// Cancelled, +// } pub enum TaskSource { Static(Vec), @@ -414,6 +404,10 @@ impl Handler> for TaskManager { self.reports .insert(message.task_ref.path.clone(), message.status); + if let Some(_next_retry_no) = next_retry_no { + todo!("not implemented"); + } + // TODO check queue?? 
if self.active_tasks.is_empty() { if self.watcher_addr.is_none() && self.pending_retries.is_empty() { @@ -466,7 +460,7 @@ impl Handler for TaskManager { impl Handler for TaskManager { type Result = (); - fn handle(&mut self, _msg: ForceStop, ctx: &mut Context) -> Self::Result { + fn handle(&mut self, _msg: ForceStop, _ctx: &mut Context) -> Self::Result { self.console .warning("force stopping active tasks".to_string()); for (_, addr) in self.active_tasks.iter() { diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index 38f2c89b3e..c8331d0e60 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -7,9 +7,6 @@ use std::time::Duration; use serde::Deserialize; -use crate::com::responses::SDKResponse; -use crate::deploy::actors::console::ConsoleActor; - use lazy_static::lazy_static; #[derive(Deserialize, Debug)] @@ -34,14 +31,6 @@ pub struct Migrations { pub migrations: String, } -#[derive(Deserialize, Debug, Clone)] -#[serde(tag = "reason")] -enum PushFailure { - Unknown(GenericPushFailure), - DatabaseResetRequired(DatabaseResetRequired), - NullConstraintViolation(NullConstraintViolation), -} - #[derive(Deserialize)] #[serde(rename_all = "camelCase")] pub struct PushResultRaw { diff --git a/meta-cli/src/main.rs b/meta-cli/src/main.rs index fd2ffae8b7..feafd3175d 100644 --- a/meta-cli/src/main.rs +++ b/meta-cli/src/main.rs @@ -53,9 +53,6 @@ use clap::Parser; use cli::upgrade::upgrade_check; use cli::Action; use cli::Args; -use com::server::init_server; -use futures::try_join; -use futures::FutureExt; use shadow_rs::shadow; shadow!(build); @@ -114,25 +111,14 @@ fn main() -> Result<()> { tokio::runtime::Builder::new_multi_thread() .enable_all() .build()? - .block_on( - async { - let server = init_server().unwrap(); - let command = gen_args.run(args.config, Some(server.handle())); - - try_join!(command, server.map(|_| Ok(()))) - } - .in_current_span(), - )?; + .block_on(async { gen_args.run(args.config).await }.in_current_span())?; } Some(command) => runner.block_on(async move { match command { cli::Commands::Serialize(_) | cli::Commands::Dev(_) | cli::Commands::Deploy(_) => { - let server = init_server().unwrap(); - let command = command.run(args.config, Some(server.handle())); - - try_join!(command, server.map(|_| Ok(()))).map(|_| ()) + command.run(args.config).await } - _ => command.run(args.config, None).await.map(|_| ()), + _ => command.run(args.config).await.map(|_| ()), } })?, None => Args::command().print_help()?, diff --git a/meta-cli/src/typegraph/loader/discovery.rs b/meta-cli/src/typegraph/loader/discovery.rs index 9450999598..e57b570f69 100644 --- a/meta-cli/src/typegraph/loader/discovery.rs +++ b/meta-cli/src/typegraph/loader/discovery.rs @@ -153,20 +153,18 @@ impl FileFilter { } match ModuleType::try_from(path) { - Ok(ModuleType::Python) => { - self.is_excluded_by_filter(path, &rel_path, &self.python_filter) - } + Ok(ModuleType::Python) => self.is_excluded_by_filter(&rel_path, &self.python_filter), Ok(ModuleType::TypeScript) => { - self.is_excluded_by_filter(path, &rel_path, &self.typescript_filter) + self.is_excluded_by_filter(&rel_path, &self.typescript_filter) } Ok(ModuleType::JavaScript) => { - self.is_excluded_by_filter(path, &rel_path, &self.javascript_filter) + self.is_excluded_by_filter(&rel_path, &self.javascript_filter) } Err(_) => true, } } - fn is_excluded_by_filter(&self, path: &Path, rel_path: &Path, filter: &GlobFilter) -> bool { + fn is_excluded_by_filter(&self, rel_path: &Path, filter: 
&GlobFilter) -> bool { if !filter.include_set.is_empty() && !filter.include_set.is_match(rel_path) { return true; } From c005bd2aa247b4b605f84c715a120bb483a17aa5 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 17:57:46 +0300 Subject: [PATCH 17/35] fix --- Cargo.lock | 99 ++++------------------- meta-cli/src/cli/deploy.rs | 2 +- meta-cli/src/deploy/actors/task/deploy.rs | 5 +- 3 files changed, 21 insertions(+), 85 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b31d882afa..503cf3a601 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -662,7 +662,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" dependencies = [ "event-listener 4.0.3", - "event-listener-strategy", + "event-listener-strategy 0.4.0", "pin-project-lite", ] @@ -1332,7 +1332,7 @@ dependencies = [ "darling 0.20.9", "proc-macro2", "quote", - "syn 2.0.65", + "syn 2.0.66", ] [[package]] @@ -4194,6 +4194,17 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "event-listener" +version = "5.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + [[package]] name = "event-listener-strategy" version = "0.4.0" @@ -4210,7 +4221,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ - "event-listener 5.3.0", + "event-listener 5.3.1", "pin-project-lite", ] @@ -4983,85 +4994,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "grep" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd79f01019ef2fe3978232135f5a7237baca9a6c6ed4dbbe9e5a51234e2306c5" -dependencies = [ - "grep-cli", - "grep-matcher", - "grep-printer", - "grep-regex", - "grep-searcher", -] - -[[package]] -name = "grep-cli" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea40788c059ab8b622c4d074732750bfb3bd2912e2dd58eabc11798a4d5ad725" -dependencies = [ - "bstr", - "globset", - "libc", - "log", - "termcolor", - "winapi-util", -] - -[[package]] -name = "grep-matcher" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47a3141a10a43acfedc7c98a60a834d7ba00dfe7bec9071cbfc19b55b292ac02" -dependencies = [ - "memchr", -] - -[[package]] -name = "grep-printer" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14551578f49da1f774b70da5bd1b8c20bbbead01620c426cb0a217536d95a6a" -dependencies = [ - "base64 0.20.0", - "bstr", - "grep-matcher", - "grep-searcher", - "serde 1.0.203", - "serde_json", - "termcolor", -] - -[[package]] -name = "grep-regex" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f748bb135ca835da5cbc67ca0e6955f968db9c5df74ca4f56b18e1ddbc68230d" -dependencies = [ - "bstr", - "grep-matcher", - "log", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", -] - -[[package]] -name = "grep-searcher" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba536ae4f69bec62d8839584dd3153d3028ef31bb229f04e09fb5a9e5a193c54" -dependencies = [ - "bstr", - "encoding_rs", - "encoding_rs_io", - "grep-matcher", - "log", - "memchr", - "memmap2 0.9.4", -] - [[package]] name = "group" version = 
"0.13.0" @@ -6579,6 +6511,7 @@ dependencies = [ "assert_cmd", "async-trait", "base64 0.22.1", + "cached", "chrono", "clap", "clap-verbosity-flag", @@ -7674,7 +7607,7 @@ checksum = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e" dependencies = [ "log", "serde 1.0.203", - "windows-sys 0.52.0", + "winapi", ] [[package]] diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 05d85297b2..9eb8a73967 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -93,7 +93,7 @@ pub struct Deploy { impl Deploy { #[tracing::instrument] pub async fn new(deploy: &DeploySubcommand, args: &ConfigArgs) -> Result { - let dir: Arc = args.dir().into(); + let dir: Arc = args.dir()?.into(); let config_path = args.config.clone(); let config = Arc::new(Config::load_or_find(config_path, &dir)?); diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index 674658ca2c..9631b55c1e 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -298,7 +298,10 @@ impl TaskAction for DeployAction { async fn get_rpc_response(&self, call: &RpcCall) -> Result { match call { - RpcCall::GetDeployTarget => Ok(serde_json::to_value(&self.deploy_target)?), + RpcCall::GetDeployTarget => { + let deploy_target: &Node = &self.deploy_target; + Ok(serde_json::to_value(&deploy_target)?) + } RpcCall::GetDeployData { typegraph } => Ok(self.get_deploy_data(typegraph)), } From 19aad79c13d9d396cad15e05c7a1b08454dfed6c Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 20:04:39 +0300 Subject: [PATCH 18/35] lint, fix tg_deploy test --- .vscode/settings.json | 12 ++---- Cargo.lock | 1 + libs/common/Cargo.toml | 1 + libs/metagen/src/mdk_rust/stubs.rs | 4 +- libs/metagen/src/tests/fixtures.rs | 4 +- libs/metagen/src/tests/mod.rs | 4 +- meta-cli/src/cli/deploy.rs | 6 +-- meta-cli/src/cli/gen.rs | 5 +-- meta-cli/src/cli/serialize.rs | 4 +- meta-cli/src/com/responses.rs | 9 ---- meta-cli/src/deploy/actors/discovery.rs | 2 +- meta-cli/src/deploy/actors/task.rs | 20 ++++----- meta-cli/src/deploy/actors/task/action.rs | 13 +++--- meta-cli/src/deploy/actors/task/deploy.rs | 4 +- meta-cli/src/deploy/actors/task_io.rs | 5 ++- meta-cli/src/deploy/actors/task_manager.rs | 23 +++-------- meta-cli/src/deploy/push/pusher.rs | 6 --- meta-cli/src/main.rs | 4 +- meta-cli/src/typegraph/loader/discovery.rs | 4 -- typegate/tests/e2e/cli/deploy_test.ts | 41 +++++++++---------- typegate/tests/utils/tg_deploy_script.py | 30 +++++--------- typegraph/core/src/global_store.rs | 2 +- typegraph/core/src/lib.rs | 36 ++++++++++++---- typegraph/core/src/typegraph.rs | 17 +++++--- typegraph/core/src/utils/postprocess/mod.rs | 6 +-- .../core/src/utils/postprocess/prisma_rt.rs | 28 ++++++------- typegraph/core/wit/typegraph.wit | 30 +++++--------- typegraph/node/sdk/src/metagen.ts | 11 ++--- typegraph/node/sdk/src/tg_deploy.ts | 5 ++- typegraph/node/sdk/src/tg_manage.ts | 6 ++- typegraph/node/sdk/src/typegraph.ts | 8 ++-- typegraph/node/sdk/src/utils/func_utils.ts | 4 +- typegraph/python/typegraph/graph/metagen.py | 15 ++++--- .../python/typegraph/graph/shared_types.py | 4 +- typegraph/python/typegraph/graph/tg_deploy.py | 28 +++++++------ typegraph/python/typegraph/graph/tg_manage.py | 9 ++-- typegraph/python/typegraph/graph/typegraph.py | 8 ++-- typegraph/python/typegraph/utils.py | 4 +- typegraph/python/typegraph/wit.py | 2 +- 39 files changed, 201 insertions(+), 224 deletions(-) diff --git a/.vscode/settings.json 
b/.vscode/settings.json index 86175931ef..206fcbe54c 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -4,11 +4,7 @@ "website/.docusaurus": true, "website/node_modules": true }, - "deno.enablePaths": [ - "typegate", - "dev", - "examples/templates/deno", - ], + "deno.enablePaths": ["typegate", "dev", "examples/templates/deno"], "deno.unstable": true, "typescript.suggest.completeFunctionCalls": true, "typescript.inlayHints.variableTypes.enabled": true, @@ -56,8 +52,6 @@ "wasmedge" ], "python.languageServer": "Pylance", - "python.analysis.extraPaths": [ - "typegraph/python" - ], - "prettier.proseWrap": "never", + "python.analysis.extraPaths": ["typegraph/python"], + "prettier.proseWrap": "never" } diff --git a/Cargo.lock b/Cargo.lock index 503cf3a601..68b8f762e8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1709,6 +1709,7 @@ dependencies = [ "serde_with 3.8.1", "tar", "thiserror", + "url", ] [[package]] diff --git a/libs/common/Cargo.toml b/libs/common/Cargo.toml index f2b749a032..42e0ec3fac 100644 --- a/libs/common/Cargo.toml +++ b/libs/common/Cargo.toml @@ -14,6 +14,7 @@ serde_with = "3.4.0" tar.workspace = true ignore = "0.4.20" reqwest = { workspace = true, features = ["json"] } +url = { version = "2.4", features = ["serde"] } async-trait.workspace = true itertools.workspace = true colored = "2.0" diff --git a/libs/metagen/src/mdk_rust/stubs.rs b/libs/metagen/src/mdk_rust/stubs.rs index 05583b5f0d..5c506f2acc 100644 --- a/libs/metagen/src/mdk_rust/stubs.rs +++ b/libs/metagen/src/mdk_rust/stubs.rs @@ -87,7 +87,7 @@ mod test { #[test] fn stub_test() -> anyhow::Result<()> { let tg_name = "my_tg".to_string(); - let tg = Typegraph { + let tg = Box::new(Typegraph { path: None, policies: vec![], deps: vec![], @@ -147,7 +147,7 @@ mod test { }, }, ], - }; + }); let generator = Generator::new(MdkRustGenConfig { base: crate::config::MdkGeneratorConfigBase { path: "/".into(), diff --git a/libs/metagen/src/tests/fixtures.rs b/libs/metagen/src/tests/fixtures.rs index 72af97564b..6145cafaad 100644 --- a/libs/metagen/src/tests/fixtures.rs +++ b/libs/metagen/src/tests/fixtures.rs @@ -4,7 +4,7 @@ use crate::interlude::*; use common::typegraph::*; -pub async fn test_typegraph_1() -> anyhow::Result { +pub async fn test_typegraph_1() -> anyhow::Result> { let out = tokio::process::Command::new("cargo") .args( "run -p meta-cli -- serialize -f tests/tg.ts" @@ -15,7 +15,7 @@ pub async fn test_typegraph_1() -> anyhow::Result { .kill_on_drop(true) .output() .await?; - let mut tg: Vec = serde_json::from_slice(&out.stdout) + let mut tg: Vec> = serde_json::from_slice(&out.stdout) .with_context(|| format!("error deserializing typegraph: {out:?}"))?; Ok(tg.pop().unwrap()) } diff --git a/libs/metagen/src/tests/mod.rs b/libs/metagen/src/tests/mod.rs index 5539f86e4c..f86667e56c 100644 --- a/libs/metagen/src/tests/mod.rs +++ b/libs/metagen/src/tests/mod.rs @@ -8,7 +8,7 @@ pub use fixtures::*; #[derive(Clone)] struct TestCtx { - typegraphs: Arc>, + typegraphs: Arc>>, } impl InputResolver for TestCtx { @@ -34,7 +34,7 @@ pub struct E2eTestCase { pub target: String, pub config: config::Config, pub target_dir: PathBuf, - pub typegraphs: HashMap, + pub typegraphs: HashMap>, pub build_fn: fn(BuildArgs) -> BoxFuture>, } diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 9eb8a73967..42c98461d7 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -11,7 +11,6 @@ use crate::interlude::*; use crate::secrets::{RawSecrets, Secrets}; use clap::Parser; use common::node::Node; 
-use owo_colors::OwoColorize; #[derive(Parser, Debug)] pub struct DeploySubcommand { @@ -160,9 +159,8 @@ impl Action for DeploySubcommand { } else { trace!("running in default mode"); // deploy a single file - let status = default_mode::run(deploy).await?; - status + default_mode::run(deploy).await? }; match status { @@ -262,7 +260,7 @@ mod watch_mode { #[tracing::instrument] pub async fn enter_watch_mode(deploy: Deploy) -> Result<()> { - if let Some(_) = &deploy.file { + if deploy.file.is_some() { bail!("Cannot use --file in watch mode"); } diff --git a/meta-cli/src/cli/gen.rs b/meta-cli/src/cli/gen.rs index 5114450132..d47e66a3a5 100644 --- a/meta-cli/src/cli/gen.rs +++ b/meta-cli/src/cli/gen.rs @@ -1,6 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 +use super::serialize::SerializeReportExt; use crate::cli::{Action, ConfigArgs, NodeArgs}; use crate::config::PathOption; use crate::deploy::actors::task::serialize::{SerializeAction, SerializeActionGenerator}; @@ -8,11 +9,9 @@ use crate::deploy::actors::task_manager::{TaskManagerInit, TaskSource}; use crate::interlude::*; use crate::{com::store::ServerStore, config::Config, deploy::actors::console::ConsoleActor}; use actix::Actor; -use clap::{Parser, ValueEnum}; +use clap::Parser; use common::typegraph::Typegraph; use metagen::*; -use serde_json::json; -use super::serialize::SerializeReportExt; #[derive(Parser, Debug, Clone)] pub struct Gen { diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index 6411f0f379..04a53f6184 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -125,6 +125,7 @@ impl Action for Serialize { } pub trait SerializeReportExt { + #[allow(clippy::vec_box)] fn into_typegraphs(self) -> Vec>; } @@ -132,7 +133,7 @@ impl SerializeReportExt for Report { fn into_typegraphs(self) -> Vec> { self.entries .into_iter() - .map(|entry| match entry.status { + .flat_map(|entry| match entry.status { TaskFinishStatus::Finished(results) => results .into_iter() .map(|(_, v)| v) @@ -155,7 +156,6 @@ impl SerializeReportExt for Report { vec![] } }) - .flatten() .collect() } } diff --git a/meta-cli/src/com/responses.rs b/meta-cli/src/com/responses.rs index 71188ab44c..e3dfea0ed7 100644 --- a/meta-cli/src/com/responses.rs +++ b/meta-cli/src/com/responses.rs @@ -29,12 +29,3 @@ pub struct SDKResponse { pub data: Option, pub error: Option, } - -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct SDKError { - code: String, - msg: String, - #[allow(unused)] - value: serde_json::Value, -} diff --git a/meta-cli/src/deploy/actors/discovery.rs b/meta-cli/src/deploy/actors/discovery.rs index a90235315d..7551c80ec6 100644 --- a/meta-cli/src/deploy/actors/discovery.rs +++ b/meta-cli/src/deploy/actors/discovery.rs @@ -62,7 +62,7 @@ impl Actor for DiscoveryActor { match Discovery::new(config, dir.to_path_buf()) .start(|path| match path { Ok(path) => { - let rel_path = diff_paths(&path, &dir).unwrap(); + let rel_path = diff_paths(path, &dir).unwrap(); task_manager.do_send(task_manager::message::AddTask { task_ref: task_generator.generate(rel_path.into(), 0), reason: TaskReason::Discovery, diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 8e4827a08f..3ec264d629 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -126,9 +126,9 @@ where s.parse::() .map_err(|_| ()) .and_then(|n| if n >= 1 { Ok(n) } else { Err(()) }) - 
.expect(&format!( - "{TIMEOUT_ENV_NAME} env value must be a positive integer" - )) + .unwrap_or_else(|_| { + panic!("{TIMEOUT_ENV_NAME} env value must be a positive integer") + }) }) .unwrap_or(DEFAULT_TIMEOUT), ), @@ -213,7 +213,7 @@ impl Handler for TaskActor { self.process = Some(child); let addr = ctx.address(); - let timeout_duration = self.timeout_duration.clone(); + let timeout_duration = self.timeout_duration; let path = self.get_path_owned(); let console = self.console.clone(); let fut = async move { @@ -260,7 +260,7 @@ impl Handler> for TaskActor { self.console .error(format!("task process not found for {:?}", self.get_path())); ctx.address().do_send(Exit(TaskFinishStatus::::Error)); - return (); + return; }; let addr = ctx.address(); @@ -321,12 +321,10 @@ impl Handler> for TaskActor { let fut = async move { let mut followup: Option = None; for result in &results.0 { - match action.finalize(result, finalize_ctx.clone()).await { - Ok(Some(followup_opt)) => { - let followup = followup.get_or_insert_with(Default::default); - followup_opt.add_to_options(followup); - } - _ => (), + if let Ok(Some(followup_opt)) = action.finalize(result, finalize_ctx.clone()).await + { + let followup = followup.get_or_insert_with(Default::default); + followup_opt.add_to_options(followup); } } self_addr.do_send(message::UpdateResults(results.0)); diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index f8b34a0aab..abfd7c8d23 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -85,7 +85,7 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { type Generator: TaskActionGenerator + Unpin; type RpcCall: serde::de::DeserializeOwned + std::fmt::Debug + Unpin + Send; - async fn get_command(&self) -> Result; + fn get_command(&self) -> impl Future>; fn get_task_ref(&self) -> &TaskRef; fn get_options(&self) -> &Self::Options; @@ -94,16 +94,19 @@ pub trait TaskAction: std::fmt::Debug + Clone + Send + Unpin { fn get_error_message(&self, err: &str) -> String; /// returns followup task options - async fn finalize( + fn finalize( &self, res: &Result, ctx: ActionFinalizeContext, - ) -> Result>>>; + ) -> impl Future>>>>; - async fn get_rpc_response(&self, call: &Self::RpcCall) -> Result; + fn get_rpc_response( + &self, + call: &Self::RpcCall, + ) -> impl Future>; } -pub type ActionResult = Result; +pub type ActionResult = Result<::SuccessData, ::FailureData>; pub fn get_typegraph_name(res: &ActionResult) -> String { match res { diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index 9631b55c1e..e200e91fc3 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -124,8 +124,6 @@ pub struct DeploySuccess { pub struct DeployError { typegraph: String, error: String, - #[serde(default)] - follow_up: Option, // todo migration } impl OutputData for DeploySuccess { @@ -300,7 +298,7 @@ impl TaskAction for DeployAction { match call { RpcCall::GetDeployTarget => { let deploy_target: &Node = &self.deploy_target; - Ok(serde_json::to_value(&deploy_target)?) + Ok(serde_json::to_value(deploy_target)?) 
} RpcCall::GetDeployData { typegraph } => Ok(self.get_deploy_data(typegraph)), diff --git a/meta-cli/src/deploy/actors/task_io.rs b/meta-cli/src/deploy/actors/task_io.rs index a7d9aec794..c1a69ff527 100644 --- a/meta-cli/src/deploy/actors/task_io.rs +++ b/meta-cli/src/deploy/actors/task_io.rs @@ -47,6 +47,7 @@ enum JsonRpcVersion { #[derive(Deserialize, Debug)] struct RpcRequest { + #[allow(dead_code)] jsonrpc: JsonRpcVersion, id: u32, #[serde(flatten)] @@ -272,8 +273,8 @@ impl TaskIoActor { } Err(err) => { self.console.error(format!( - "{scope} invalid jsonrpc request {req:?}: {err}", - scope = format!("[{path}]", path = self.get_console_scope()) + "[{path}] invalid jsonrpc request {req:?}: {err}", + path = self.get_console_scope(), )); } } diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index d1eda2f051..8bda9cfb70 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -57,11 +57,6 @@ pub enum StopSchedule { Automatic, } -enum Status { - Default, - Stopping, // waiting for active tasks to finish; cancel pending tasks -} - #[derive(Clone, Debug)] pub enum StopReason { Natural, @@ -182,7 +177,8 @@ impl TaskManagerInit { let watcher_addr = self.start_source(addr, task_generator.clone()); let console = self.console.clone(); - let task_manager = TaskManager:: { + + TaskManager:: { init_params: self, task_generator, active_tasks: Default::default(), @@ -193,9 +189,7 @@ impl TaskManagerInit { reports: IndexMap::new(), watcher_addr, console, - }; - - task_manager + } }); report_rx.await.expect("task manager has been dropped") @@ -394,7 +388,7 @@ impl Handler> for TaskManager { } TaskFinishStatus::Finished(results) => { // TODO partial retry - if multiple typegraphs in a single file - if results.iter().any(|r| matches!(r.1, Err(_))) { + if results.iter().any(|r| r.1.is_err()) { next_retry_no = Some(message.task_ref.retry_no + 1); } } @@ -415,13 +409,8 @@ impl Handler> for TaskManager { self.console.debug("all tasks finished".to_string()); self.stop_reason = Some(StopReason::Natural); ctx.stop(); - } else { - match self.stop_reason { - Some(StopReason::Manual) => { - ctx.stop(); - } - _ => {} - } + } else if let Some(StopReason::Manual) = self.stop_reason { + ctx.stop(); } } } diff --git a/meta-cli/src/deploy/push/pusher.rs b/meta-cli/src/deploy/push/pusher.rs index c8331d0e60..844f453ec8 100644 --- a/meta-cli/src/deploy/push/pusher.rs +++ b/meta-cli/src/deploy/push/pusher.rs @@ -65,12 +65,6 @@ struct ResolveNullConstraintViolation { failure: NullConstraintViolation, } -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct GenericPushFailure { - message: String, -} - lazy_static! 
{ static ref RETRY_COUNTERS: Mutex>> = Mutex::new(HashMap::new()); } diff --git a/meta-cli/src/main.rs b/meta-cli/src/main.rs index 6b99a54cda..502e4eebf3 100644 --- a/meta-cli/src/main.rs +++ b/meta-cli/src/main.rs @@ -111,9 +111,7 @@ fn main() -> Result<()> { cli::Commands::Serialize(_) | cli::Commands::Dev(_) | cli::Commands::Deploy(_) - | cli::Commands::Gen(_) => { - command.run(args.config).await - } + | cli::Commands::Gen(_) => command.run(args.config).await, _ => command.run(args.config).await.map(|_| ()), } })?, diff --git a/meta-cli/src/typegraph/loader/discovery.rs b/meta-cli/src/typegraph/loader/discovery.rs index 4e171718c4..556958dab2 100644 --- a/meta-cli/src/typegraph/loader/discovery.rs +++ b/meta-cli/src/typegraph/loader/discovery.rs @@ -94,10 +94,6 @@ pub struct GlobFilter { exclude_set: GlobSet, } -struct SpecificFilters { - globs: GlobFilter, -} - pub struct FileFilter { base_dir: PathBuf, gitignore: Option, diff --git a/typegate/tests/e2e/cli/deploy_test.ts b/typegate/tests/e2e/cli/deploy_test.ts index 1270efb05a..5178dca8a6 100644 --- a/typegate/tests/e2e/cli/deploy_test.ts +++ b/typegate/tests/e2e/cli/deploy_test.ts @@ -145,8 +145,7 @@ Meta.test( const port = t.port!; const { connStr, schema } = randomPGConnStr(); const secrets = { - POSTGRES: - connStr, + POSTGRES: connStr, }; await t.should("load first version of the typegraph", async () => { await reset(tgName, schema); @@ -203,8 +202,7 @@ Meta.test( const { connStr, schema } = randomPGConnStr(); const e = await t.engine("prisma.py", { secrets: { - POSTGRES: - connStr, + POSTGRES: connStr, }, }); @@ -253,22 +251,22 @@ Meta.test( "--create-migration", ]); - await t.should( - "have replaced and terminated the previous engine", - async () => { - await gql` + await t.should( + "have replaced and terminated the previous engine", + async () => { + await gql` query { findManyRecords { id } } ` - .expectData({ + .expectData({ findManyRecords: [], }) - .on(e); - }, - ); + .on(e); + }, + ); const e2 = t.getTypegraphEngine("prisma")!; @@ -304,8 +302,7 @@ Meta.test( const { connStr, schema } = randomPGConnStr(); const e = await t.engine("prisma.py", { secrets: { - POSTGRES: - connStr, + POSTGRES: connStr, }, prefix: "pref-", }); @@ -344,22 +341,22 @@ Meta.test( "--allow-dirty", ]); - await t.should( - "succeed have replaced and terminated the previous engine", - async () => { - await gql` + await t.should( + "succeed have replaced and terminated the previous engine", + async () => { + await gql` query { findManyRecords { id } } ` - .expectData({ + .expectData({ findManyRecords: [], }) - .on(e); - }, - ); + .on(e); + }, + ); const e2 = t.getTypegraphEngine("pref-prisma")!; diff --git a/typegate/tests/utils/tg_deploy_script.py b/typegate/tests/utils/tg_deploy_script.py index 86dea43e68..bbf698b589 100644 --- a/typegate/tests/utils/tg_deploy_script.py +++ b/typegate/tests/utils/tg_deploy_script.py @@ -4,12 +4,14 @@ import sys from typegraph.gen.exports.core import ( - ArtifactResolutionConfig, MigrationAction, - MigrationConfig, ) from typegraph.graph.shared_types import BasicAuth -from typegraph.graph.tg_deploy import TypegraphDeployParams, tg_deploy +from typegraph.graph.tg_deploy import ( + TypegraphDeployParams, + tg_deploy, + TypegateConnectionOptions, +) # get command args cwd = sys.argv[1] @@ -39,7 +41,7 @@ ) -tg_func = getattr(module, tg_name) +tg = getattr(module, tg_name) secrets = json.loads(secrets_str) @@ -56,26 +58,16 @@ global_action_create = global_action_create == "true" -tg = tg_func() deploy_result = 
tg_deploy( tg, TypegraphDeployParams( - base_url=gate, - auth=auth, + typegate=TypegateConnectionOptions(url=gate, auth=auth), typegraph_path=os.path.join(cwd, module_name), secrets=secrets, - artifacts_config=ArtifactResolutionConfig( - dir=cwd, - prefix=None, - disable_artifact_resolution=disable_art_resol, - codegen=codegen, - prisma_migration=MigrationConfig( - migration_dir=migration_dir, - global_action=MigrationAction( - reset=global_action_reset, create=global_action_create - ), - runtime_actions=None, - ), + migrations_dir=migration_dir, + migration_actions=None, + default_migration_action=MigrationAction( + apply=True, reset=global_action_reset, create=global_action_create ), ), ) diff --git a/typegraph/core/src/global_store.rs b/typegraph/core/src/global_store.rs index 3628b0f7c9..416ac8c631 100644 --- a/typegraph/core/src/global_store.rs +++ b/typegraph/core/src/global_store.rs @@ -127,7 +127,7 @@ pub struct NameRegistration(pub bool); #[cfg(test)] impl Store { pub fn reset() { - let _ = crate::typegraph::finalize(None); + let _ = crate::typegraph::serialize(Default::default()); with_store_mut(|s| *s = Store::new()); } } diff --git a/typegraph/core/src/lib.rs b/typegraph/core/src/lib.rs index e27c57daad..d42d1ed06b 100644 --- a/typegraph/core/src/lib.rs +++ b/typegraph/core/src/lib.rs @@ -31,7 +31,7 @@ use types::{ use utils::clear_name; use wit::core::{ - Artifact, ContextCheck, FinalizeParams, Policy, PolicyId, PolicySpec, TransformData, TypeBase, + Artifact, ContextCheck, Policy, PolicyId, PolicySpec, SerializeParams, TransformData, TypeBase, TypeEither, TypeFile, TypeFloat, TypeFunc, TypeId as CoreTypeId, TypeInteger, TypeList, TypeOptional, TypeString, TypeStruct, TypeUnion, TypegraphInitParams, }; @@ -53,8 +53,8 @@ impl wit::core::Guest for Lib { typegraph::init(params) } - fn finalize_typegraph(res_config: FinalizeParams) -> Result<(String, Vec)> { - typegraph::finalize(res_config) + fn serialize_typegraph(res_config: SerializeParams) -> Result<(String, Vec)> { + typegraph::serialize(res_config) } fn refb(name: String, attributes: Vec<(String, String)>) -> Result { @@ -549,8 +549,7 @@ mod tests { use crate::global_store::Store; use crate::t::{self, TypeBuilder}; use crate::test_utils::setup; - use crate::wit::core::Cors; - use crate::wit::core::Guest; + use crate::wit::core::{Cors, Guest, MigrationAction, PrismaMigrationConfig, SerializeParams}; use crate::wit::runtimes::{Effect, Guest as GuestRuntimes, MaterializerDenoFunc}; use crate::Lib; use crate::TypegraphInitParams; @@ -575,6 +574,27 @@ mod tests { } } + impl Default for SerializeParams { + fn default() -> Self { + Self { + typegraph_path: "some/dummy/path".to_string(), + prefix: None, + artifact_resolution: false, + codegen: false, + pretty: true, + prisma_migration: PrismaMigrationConfig { + migrations_dir: "".to_string(), + migration_actions: vec![], + default_migration_action: MigrationAction { + apply: false, + create: false, + reset: false, + }, + }, + } + } + } + #[test] fn test_integer_invalid_max() { let res = t::integer().min(12).max(10).build(); @@ -621,7 +641,7 @@ mod tests { crate::test_utils::setup(Some("test-2")), Err(errors::nested_typegraph_context("test-1")) ); - Lib::finalize_typegraph(None)?; + Lib::serialize_typegraph(Default::default())?; Ok(()) } @@ -634,7 +654,7 @@ mod tests { ); assert!( - matches!(Lib::finalize_typegraph(None), Err(e) if e == errors::expected_typegraph_context()) + matches!(Lib::serialize_typegraph(Default::default()), Err(e) if e == errors::expected_typegraph_context()) 
); Ok(()) @@ -728,7 +748,7 @@ mod tests { let mat = Lib::register_deno_func(MaterializerDenoFunc::with_code("() => 12"), Effect::Read)?; Lib::expose(vec![("one".to_string(), t::func(s, b, mat)?.into())], None)?; - let typegraph = Lib::finalize_typegraph(None)?; + let typegraph = Lib::serialize_typegraph(Default::default())?; insta::assert_snapshot!(typegraph.0); Ok(()) } diff --git a/typegraph/core/src/typegraph.rs b/typegraph/core/src/typegraph.rs index 53544ad585..582af944f0 100644 --- a/typegraph/core/src/typegraph.rs +++ b/typegraph/core/src/typegraph.rs @@ -27,8 +27,8 @@ use std::hash::Hasher as _; use std::rc::Rc; use crate::wit::core::{ - Artifact as WitArtifact, Error as TgError, FinalizeParams, Guest, MaterializerId, PolicyId, - PolicySpec, RuntimeId, TypegraphInitParams, + Artifact as WitArtifact, Error as TgError, Guest, MaterializerId, PolicyId, PolicySpec, + RuntimeId, SerializeParams, TypegraphInitParams, }; #[derive(Default)] @@ -181,9 +181,9 @@ pub fn finalize_auths(ctx: &mut TypegraphContext) -> Result>>() } -pub fn finalize(params: FinalizeParams) -> Result<(String, Vec)> { +pub fn serialize(params: SerializeParams) -> Result<(String, Vec)> { #[cfg(test)] - eprintln!("Finalizing typegraph..."); + eprintln!("Serializing typegraph typegraph..."); let mut ctx = TG.with(|tg| { tg.borrow_mut() @@ -227,6 +227,7 @@ pub fn finalize(params: FinalizeParams) -> Result<(String, Vec)> { tg.meta.prefix = params.prefix.clone(); + let pretty = params.pretty; TypegraphPostProcessor::new(params).postprocess(&mut tg)?; let artifacts = tg @@ -239,7 +240,13 @@ pub fn finalize(params: FinalizeParams) -> Result<(String, Vec)> { Store::restore(ctx.saved_store_state.unwrap()); - let result = match serde_json::to_string(&tg).map_err(|e| e.to_string().into()) { + let result = if pretty { + serde_json::to_string_pretty(&tg) + } else { + serde_json::to_string(&tg) + }; + + let result = match result.map_err(|e| e.to_string().into()) { Ok(res) => res, Err(e) => return Err(e), }; diff --git a/typegraph/core/src/utils/postprocess/mod.rs b/typegraph/core/src/utils/postprocess/mod.rs index 7a491aa1ab..8a3699344b 100644 --- a/typegraph/core/src/utils/postprocess/mod.rs +++ b/typegraph/core/src/utils/postprocess/mod.rs @@ -1,7 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -use crate::{global_store::Store, utils::fs_host, wit::core::FinalizeParams}; +use crate::{global_store::Store, utils::fs_host, wit::core::SerializeParams}; use common::typegraph::Typegraph; use std::path::{Path, PathBuf}; @@ -24,11 +24,11 @@ pub trait PostProcessor { /// Compose all postprocessors pub struct TypegraphPostProcessor { - config: FinalizeParams, + config: SerializeParams, } impl TypegraphPostProcessor { - pub fn new(config: FinalizeParams) -> Self { + pub fn new(config: SerializeParams) -> Self { Self { config } } } diff --git a/typegraph/core/src/utils/postprocess/prisma_rt.rs b/typegraph/core/src/utils/postprocess/prisma_rt.rs index 751972d6a6..907386386f 100644 --- a/typegraph/core/src/utils/postprocess/prisma_rt.rs +++ b/typegraph/core/src/utils/postprocess/prisma_rt.rs @@ -10,7 +10,7 @@ use crate::utils::fs_host; use crate::utils::postprocess::PostProcessor; use crate::wit::core::MigrationAction; use crate::wit::core::PrismaMigrationConfig; -use crate::wit::metatype::typegraph::host::{eprint, path_exists}; +use crate::wit::metatype::typegraph::host::path_exists; pub struct PrismaProcessor { config: PrismaMigrationConfig, @@ -41,13 +41,17 @@ impl PrismaProcessor { rt_data.migration_options = Some(MigrationOptions { migration_files: { - let path = fs_host::make_absolute(&path)?; - match path_exists(&path.display().to_string())? { - true => { - let base64 = fs_host::compress_and_encode_base64(path)?; - Some(base64) + if action.apply { + let path = fs_host::make_absolute(&path)?; + match path_exists(&path.display().to_string())? { + true => { + let base64 = fs_host::compress_and_encode_base64(path)?; + Some(base64) + } + false => None, } - false => None, + } else { + None } }, create: action.create, @@ -64,14 +68,8 @@ impl PrismaProcessor { self.config .migration_actions .iter() - .filter_map(|(rt, action)| { - if rt == name { - Some(action.clone()) - } else { - None - } - }) + .filter_map(|(rt, action)| if rt == name { Some(*action) } else { None }) .last() - .unwrap_or(self.config.default_migration_action.clone()) + .unwrap_or(self.config.default_migration_action) } } diff --git a/typegraph/core/wit/typegraph.wit b/typegraph/core/wit/typegraph.wit index 9e2c0cf615..43b29ea9b7 100644 --- a/typegraph/core/wit/typegraph.wit +++ b/typegraph/core/wit/typegraph.wit @@ -42,17 +42,6 @@ interface core { init-typegraph: func(params: typegraph-init-params) -> result<_, error>; - // record migration-action { - // create: bool, - // reset: bool - // } - - // record migration-config { - // migration-dir: string, - // global-action: migration-action, // global config (all runtimes) - // runtime-actions: option>> // config per runtime name (override global-action) - // } - record migration-action { apply: bool, create: bool, @@ -65,15 +54,16 @@ interface core { default-migration-action: migration-action, } - record finalize-params { + record serialize-params { typegraph-path: string, prefix: option, artifact-resolution: bool, codegen: bool, prisma-migration: prisma-migration-config, + pretty: bool, } - finalize-typegraph: func(params: finalize-params) -> result>, error>; + serialize-typegraph: func(params: serialize-params) -> result>, error>; type type-id = u32; record type-base { @@ -236,7 +226,7 @@ interface core { expose: func(fns: list>, default-policy: option>) -> result<_, error>; set-seed: func(seed: option) -> result<_, error>; - + type runtime-id = u32; type materializer-id = u32; @@ -249,7 +239,7 @@ interface core { interface runtimes { use 
core.{error, type-id, func-params, runtime-id, materializer-id, artifact}; - + get-deno-runtime: func() -> runtime-id; type idempotency = bool; @@ -534,10 +524,10 @@ interface utils { // Example: // reduce({a: 1, { b: {c: g.inherit(), d: [1, 2, 3]}}) // produces a list of reduce-path - // [ - // { path: [a], value: { inherit: false, payload: 1 } } - // { path: [a, b, c], value: { inherit: true } } - // { path: [a, b, d], value: { inherit: false, payload: [1, 2, 3] } } + // [ + // { path: [a], value: { inherit: false, payload: 1 } } + // { path: [a, b, c], value: { inherit: true } } + // { path: [a, b, d], value: { inherit: false, payload: [1, 2, 3] } } // ] record reduce-value { inherit: bool, @@ -595,7 +585,7 @@ interface utils { record mdk-config { workspace-path: string, target-name: string, - config-json: string, + config-json: string, tg-json: string, } diff --git a/typegraph/node/sdk/src/metagen.ts b/typegraph/node/sdk/src/metagen.ts index f3f6def0cd..52309cb3ad 100644 --- a/typegraph/node/sdk/src/metagen.ts +++ b/typegraph/node/sdk/src/metagen.ts @@ -1,7 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -import { FinalizeParams } from "./gen/interfaces/metatype-typegraph-core.js"; +import { SerializeParams } from "./gen/interfaces/metatype-typegraph-core.js"; import { TypegraphOutput } from "./typegraph.js"; import { wit_utils } from "./wit.js"; import { freezeTgOutput, getEnvVariable } from "./utils/func_utils.js"; @@ -10,7 +10,7 @@ import { MdkOutput, } from "./gen/interfaces/metatype-typegraph-utils.js"; -const finalizeParams = { +const serializeParams = { // TODO env variable key constants.js typegraphPath: getEnvVariable("MCLI_TG_PATH")!, prefix: undefined, @@ -25,7 +25,8 @@ const finalizeParams = { reset: false, }, }, -} satisfies FinalizeParams; + pretty: false, +} satisfies SerializeParams; export class Metagen { constructor( @@ -34,10 +35,10 @@ export class Metagen { ) {} private getMdkConfig(tgOutput: TypegraphOutput, targetName: string) { - const frozenOut = freezeTgOutput(finalizeParams, tgOutput); + const frozenOut = freezeTgOutput(serializeParams, tgOutput); return { configJson: JSON.stringify(this.genConfig), - tgJson: frozenOut.serialize(finalizeParams).tgJson, + tgJson: frozenOut.serialize(serializeParams).tgJson, targetName, workspacePath: this.workspacePath, } as MdkConfig; diff --git a/typegraph/node/sdk/src/tg_deploy.ts b/typegraph/node/sdk/src/tg_deploy.ts index 18540a56ce..818a422f4f 100644 --- a/typegraph/node/sdk/src/tg_deploy.ts +++ b/typegraph/node/sdk/src/tg_deploy.ts @@ -2,8 +2,8 @@ // SPDX-License-Identifier: MPL-2.0 import { - FinalizeParams, MigrationAction, + SerializeParams, } from "./gen/interfaces/metatype-typegraph-core.js"; import { ArtifactUploader } from "./tg_artifact_upload.js"; import { TypegraphOutput } from "./typegraph.js"; @@ -73,7 +73,8 @@ export async function tgDeploy( reset: false, }, }, - } satisfies FinalizeParams; + pretty: false, + } satisfies SerializeParams; const serialized = typegraph.serialize(serializeParams); const tgJson = serialized.tgJson; const refArtifacts = serialized.ref_artifacts; diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index f75897769d..f7cc9181c7 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -1,7 +1,7 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
// SPDX-License-Identifier: MPL-2.0 -import { FinalizeParams } from "./gen/interfaces/metatype-typegraph-core.js"; +import { SerializeParams } from "./gen/interfaces/metatype-typegraph-core.js"; import { BasicAuth, tgDeploy } from "./tg_deploy.js"; import { TgFinalizationResult, TypegraphOutput } from "./typegraph.js"; import { freezeTgOutput } from "./utils/func_utils.js"; @@ -50,6 +50,7 @@ export class Manager { reset: false, }, }, + pretty: false, }); log.success(finalizationResult.tgJson, true); } catch (err: any) { @@ -64,7 +65,7 @@ export class Manager { const deployData = await rpc.getDeployData(this.#typegraph.name); const env = this.#env; - const params: FinalizeParams = { + const params: SerializeParams = { typegraphPath: env.typegraph_path, prefix: env.prefix, artifactResolution: true, @@ -74,6 +75,7 @@ export class Manager { migrationActions: Object.entries(deployData.migrationActions), defaultMigrationAction: deployData.defaultMigrationAction, }, + pretty: false, }; // hack for allowing tg.serialize(config) to be called more than once diff --git a/typegraph/node/sdk/src/typegraph.ts b/typegraph/node/sdk/src/typegraph.ts index c034f23acb..eb574acd08 100644 --- a/typegraph/node/sdk/src/typegraph.ts +++ b/typegraph/node/sdk/src/typegraph.ts @@ -14,7 +14,7 @@ import { Auth, Cors as CorsWit, Rate, wit_utils } from "./wit.js"; import { getPolicyChain } from "./types.js"; import { Artifact, - FinalizeParams, + SerializeParams, } from "./gen/interfaces/metatype-typegraph-core.js"; import { Manager } from "./tg_manage.js"; import { log } from "./io.js"; @@ -125,7 +125,7 @@ export class RawAuth { } export interface TypegraphOutput { - serialize: (config: FinalizeParams) => TgFinalizationResult; + serialize: (config: SerializeParams) => TgFinalizationResult; name: string; } @@ -220,9 +220,9 @@ export async function typegraph( builder(g); const ret = { - serialize(config: FinalizeParams) { + serialize(config: SerializeParams) { try { - const [tgJson, ref_artifacts] = core.finalizeTypegraph(config); + const [tgJson, ref_artifacts] = core.serializeTypegraph(config); const result: TgFinalizationResult = { tgJson: tgJson, ref_artifacts: ref_artifacts, diff --git a/typegraph/node/sdk/src/utils/func_utils.ts b/typegraph/node/sdk/src/utils/func_utils.ts index 97fdc3fe89..eab345d749 100644 --- a/typegraph/node/sdk/src/utils/func_utils.ts +++ b/typegraph/node/sdk/src/utils/func_utils.ts @@ -8,7 +8,7 @@ import { } from "../typegraph.js"; import { ReducePath } from "../gen/interfaces/metatype-typegraph-utils.js"; import { serializeStaticInjection } from "./injection_utils.js"; -import { FinalizeParams } from "../gen/interfaces/metatype-typegraph-core.js"; +import { SerializeParams } from "../gen/interfaces/metatype-typegraph-core.js"; import { log } from "../io.js"; export function stringifySymbol(symbol: symbol) { @@ -86,7 +86,7 @@ const frozenMemo: Record = {}; /** Create a reusable version of a `TypegraphOutput` */ export function freezeTgOutput( - config: FinalizeParams, + config: SerializeParams, tgOutput: TypegraphOutput, ): TypegraphOutput { frozenMemo[tgOutput.name] = frozenMemo[tgOutput.name] ?? 
diff --git a/typegraph/python/typegraph/graph/metagen.py b/typegraph/python/typegraph/graph/metagen.py index 5b8317eb6f..da6df628f7 100644 --- a/typegraph/python/typegraph/graph/metagen.py +++ b/typegraph/python/typegraph/graph/metagen.py @@ -5,7 +5,7 @@ from typing import List, Union from typegraph.gen.exports.core import ( MigrationAction, - FinalizeParams, + SerializeParams, PrismaMigrationConfig, ) from typegraph.gen.exports.utils import MdkConfig, MdkOutput @@ -19,7 +19,7 @@ if _tg_path is None: raise Exception("MCLI_TG_PATH not set") -finalize_params = FinalizeParams( +serialize_params = SerializeParams( typegraph_path=_tg_path, prefix=None, artifact_resolution=False, @@ -27,8 +27,11 @@ prisma_migration=PrismaMigrationConfig( migrations_dir="prisma-migrations", migration_actions=[], - default_migration_action=MigrationAction(apply=False, create=False, reset=False), - ) + default_migration_action=MigrationAction( + apply=False, create=False, reset=False + ), + ), + pretty=False, ) @@ -45,9 +48,9 @@ def _get_mdk_config( tg_output: TypegraphOutput, target_name: str, ) -> MdkConfig: - frozen_out = freeze_tg_output(finalize_params, tg_output) + frozen_out = freeze_tg_output(serialize_params, tg_output) return MdkConfig( - tg_json=frozen_out.serialize(finalize_params).tgJson, + tg_json=frozen_out.serialize(serialize_params).tgJson, config_json=json.dumps(self.gen_config), workspace_path=self.workspace_path, target_name=target_name, diff --git a/typegraph/python/typegraph/graph/shared_types.py b/typegraph/python/typegraph/graph/shared_types.py index 67c1c3f7dd..cce409a2c8 100644 --- a/typegraph/python/typegraph/graph/shared_types.py +++ b/typegraph/python/typegraph/graph/shared_types.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from typing import Callable, List from typegraph.gen.exports.core import Artifact -from typegraph.wit import FinalizeParams +from typegraph.wit import SerializeParams @dataclass @@ -17,7 +17,7 @@ class FinalizationResult: @dataclass class TypegraphOutput: name: str - serialize: Callable[[FinalizeParams], FinalizationResult] + serialize: Callable[[SerializeParams], FinalizationResult] @dataclass diff --git a/typegraph/python/typegraph/graph/tg_deploy.py b/typegraph/python/typegraph/graph/tg_deploy.py index c579db492a..5562aa06b2 100644 --- a/typegraph/python/typegraph/graph/tg_deploy.py +++ b/typegraph/python/typegraph/graph/tg_deploy.py @@ -13,14 +13,16 @@ from typegraph.graph.shared_types import BasicAuth from typegraph.graph.tg_artifact_upload import ArtifactUploader from typegraph.graph.typegraph import TypegraphOutput -from typegraph.wit import FinalizeParams, store, wit_utils +from typegraph.wit import SerializeParams, store, wit_utils from typegraph import version as sdk_version + @dataclass class TypegateConnectionOptions: url: str auth: Optional[BasicAuth] + @dataclass class TypegraphDeployParams: typegate: TypegateConnectionOptions @@ -61,31 +63,33 @@ def tg_deploy(tg: TypegraphOutput, params: TypegraphDeployParams) -> DeployResul sep = "/" if not typegate.url.endswith("/") else "" url = typegate.url + sep + "typegate" - headers = {"Content-Type": "application/json", "User-Agent": f"TypegraphSdk/{sdk_version} Python/{python_version()}"} + headers = { + "Content-Type": "application/json", + "User-Agent": f"TypegraphSdk/{sdk_version} Python/{python_version()}", + } if typegate.auth is not None: headers["Authorization"] = typegate.auth.as_header_value() - finalize_params = FinalizeParams( + serialize_params = SerializeParams( 
typegraph_path=params.typegraph_path, prefix=params.prefix, artifact_resolution=True, codegen=False, prisma_migration=PrismaMigrationConfig( migrations_dir=params.migrations_dir or "prisma-migrations", - migration_actions=[(k,v) for k, v in (params.migration_actions or {}).items()], - default_migration_action=params.default_migration_action or MigrationAction( - apply=True, - create=False, - reset=False - ), - ) + migration_actions=[ + (k, v) for k, v in (params.migration_actions or {}).items() + ], + default_migration_action=params.default_migration_action + or MigrationAction(apply=True, create=False, reset=False), + ), + pretty=False, ) - serialized = tg.serialize(finalize_params) + serialized = tg.serialize(serialize_params) tg_json = serialized.tgJson ref_artifacts = serialized.ref_artifacts - if len(ref_artifacts) > 0: # upload the referred artifacts artifact_uploader = ArtifactUploader( diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index 54dad14eb9..6ae1eebd61 100644 --- a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -5,7 +5,7 @@ from pathlib import Path from typegraph.gen.exports.core import ( - FinalizeParams, + SerializeParams, MigrationAction, PrismaMigrationConfig, ) @@ -41,7 +41,7 @@ def get_migration_dir(self): def serialize(self): env = self.env - params = FinalizeParams( + params = SerializeParams( typegraph_path=env.typegraph_path, prefix=env.prefix, artifact_resolution=True, @@ -55,6 +55,7 @@ def serialize(self): reset=False, ), ), + pretty=False, ) try: @@ -68,7 +69,7 @@ def deploy(self): env = self.env deploy_data = Rpc.get_deploy_data(self.typegraph.name) - params = FinalizeParams( + params = SerializeParams( typegraph_path=env.typegraph_path, prefix=env.prefix, artifact_resolution=True, @@ -78,6 +79,7 @@ def deploy(self): migration_actions=list(deploy_data.migration_actions.items()), default_migration_action=deploy_data.default_migration_action, ), + pretty=False, ) # hack for allowing tg.serialize(config) to be called more than once @@ -89,7 +91,6 @@ def deploy(self): Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) return - try: deploy_target = Rpc.get_deploy_target() params = TypegraphDeployParams( diff --git a/typegraph/python/typegraph/graph/typegraph.py b/typegraph/python/typegraph/graph/typegraph.py index be2c0f312d..1cf18d4a7e 100644 --- a/typegraph/python/typegraph/graph/typegraph.py +++ b/typegraph/python/typegraph/graph/typegraph.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Callable, List, Optional, Union, Any from typegraph.gen.exports.core import ( - FinalizeParams, + SerializeParams, Rate, TypegraphInitParams, ) @@ -193,7 +193,7 @@ def decorator(builder: Callable[[Graph], None]) -> TypegraphOutput: if filter is not None and actual_name not in filter: Log.debug("typegraph '{actual_name}' skipped") - def serialize(params: FinalizeParams): + def serialize(params: SerializeParams): raise Exception("typegraph was filtered out") return TypegraphOutput(name=actual_name, serialize=serialize) @@ -236,9 +236,9 @@ def serialize(params: FinalizeParams): # config is only known at deploy time def serialize_with_artifacts( - config: FinalizeParams, + config: SerializeParams, ): - finalization_result = core.finalize_typegraph(store, config) + finalization_result = core.serialize_typegraph(store, config) if isinstance(finalization_result, Err): raise Exception(finalization_result.value) diff --git 
a/typegraph/python/typegraph/utils.py b/typegraph/python/typegraph/utils.py index 007dbcd331..d67097c23e 100644 --- a/typegraph/python/typegraph/utils.py +++ b/typegraph/python/typegraph/utils.py @@ -5,7 +5,7 @@ from functools import reduce from typing import Any, Dict, List, Optional, Tuple, Union -from typegraph.gen.exports.core import FinalizeParams +from typegraph.gen.exports.core import SerializeParams from typegraph.gen.exports.utils import ReducePath, ReduceValue from typegraph.graph.shared_types import FinalizationResult, TypegraphOutput from typegraph.injection import InheritDef, serialize_static_injection @@ -84,7 +84,7 @@ def unpack_tarb64(tar_b64: str, dest: str): def freeze_tg_output( - config: FinalizeParams, tg_output: TypegraphOutput + config: SerializeParams, tg_output: TypegraphOutput ) -> TypegraphOutput: if tg_output.name not in frozen_memo: frozen_memo[tg_output.name] = tg_output.serialize(config) diff --git a/typegraph/python/typegraph/wit.py b/typegraph/python/typegraph/wit.py index ba852d23ae..6db8152810 100644 --- a/typegraph/python/typegraph/wit.py +++ b/typegraph/python/typegraph/wit.py @@ -12,7 +12,7 @@ # Make sure the imports are similar to the node implementation from typegraph.gen.exports.core import ( - FinalizeParams, # noqa + SerializeParams, # noqa PrismaMigrationConfig, # noqa MigrationAction, # noqa ) From 82bc03d2394c1cb8a37cb71a86e597b96162cea3 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 20:45:02 +0300 Subject: [PATCH 19/35] fix pre-commit --- meta-cli/src/com/server.rs | 0 meta-cli/src/deploy/actor-model.drawio.svg | 2 +- meta-cli/src/deploy/actors/loader.rs | 0 meta-cli/src/deploy/actors/task.rs | 2 - .../src/deploy/push/migration_resolution.rs | 44 ------------------- meta-cli/src/deploy/push/mod.rs | 1 - typegate/tests/e2e/cli/dev_test.ts | 3 -- 7 files changed, 1 insertion(+), 51 deletions(-) delete mode 100644 meta-cli/src/com/server.rs delete mode 100644 meta-cli/src/deploy/actors/loader.rs delete mode 100644 meta-cli/src/deploy/push/migration_resolution.rs diff --git a/meta-cli/src/com/server.rs b/meta-cli/src/com/server.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/meta-cli/src/deploy/actor-model.drawio.svg b/meta-cli/src/deploy/actor-model.drawio.svg index 11e7877ab4..26ec8bcac1 100644 --- a/meta-cli/src/deploy/actor-model.drawio.svg +++ b/meta-cli/src/deploy/actor-model.drawio.svg @@ -436,4 +436,4 @@ - \ No newline at end of file + diff --git a/meta-cli/src/deploy/actors/loader.rs b/meta-cli/src/deploy/actors/loader.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 3ec264d629..15acda8376 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -12,8 +12,6 @@ //! deployment) for each typegraph with for JSON-serialized data. //! - stderr is used for fatal errors that causes the program to exit; mainly //! unhandled exception in JavaScript or Python -//! -//! TODO: manage the communication between the CLI and the task process in the `TaskActor`. pub mod action; mod command; diff --git a/meta-cli/src/deploy/push/migration_resolution.rs b/meta-cli/src/deploy/push/migration_resolution.rs deleted file mode 100644 index dd67260fce..0000000000 --- a/meta-cli/src/deploy/push/migration_resolution.rs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
-// SPDX-License-Identifier: MPL-2.0 - -// use crate::interlude::*; - -// use crate::com::store::{MigrationAction, RuntimeMigrationAction, ServerStore}; - -// DatabaseReset failure - -// #[derive(Debug)] -// pub struct ConfirmDatabaseResetRequired { -// pub typegraph_path: PathBuf, -// pub loader: Addr, -// pub runtime_name: String, -// } - -// impl ConfirmHandler for ConfirmDatabaseResetRequired { -// fn on_confirm(&self) { -// let tg_path = self.typegraph_path.clone(); -// let runtime_name = self.runtime_name.clone(); -// do_force_reset(&self.loader, tg_path, runtime_name); -// } -// } - -// // NullConstraintViolation failure - -// /// Set `reset` to `true` for the specified prisma runtime + re-run the typegraph -// fn do_force_reset(loader: &Addr, tg_path: PathBuf, runtime_name: String) { -// // reset -// let glob_cfg = ServerStore::get_migration_action_glob(); -// ServerStore::set_migration_action( -// tg_path.clone(), -// RuntimeMigrationAction { -// runtime_name, -// action: MigrationAction { -// reset: true, // ! -// create: glob_cfg.create, -// }, -// }, -// ); - -// // reload -// loader.do_send(LoadModule(tg_path.into())); -// } diff --git a/meta-cli/src/deploy/push/mod.rs b/meta-cli/src/deploy/push/mod.rs index c921f07796..3e59ed7acd 100644 --- a/meta-cli/src/deploy/push/mod.rs +++ b/meta-cli/src/deploy/push/mod.rs @@ -1,5 +1,4 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -pub mod migration_resolution; pub mod pusher; diff --git a/typegate/tests/e2e/cli/dev_test.ts b/typegate/tests/e2e/cli/dev_test.ts index b2ddc4f8fb..0b12b073d1 100644 --- a/typegate/tests/e2e/cli/dev_test.ts +++ b/typegate/tests/e2e/cli/dev_test.ts @@ -39,9 +39,6 @@ Meta.test( "metatype.yml": "metatype.yml", }, }, - // // TODO remove this - // sanitizeResources: false, - // sanitizeOps: false, }, async (t) => { const schema = randomSchema(); From 3ac7d14223e8374844c1f4abf2d71ae962760309 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Tue, 11 Jun 2024 20:49:17 +0300 Subject: [PATCH 20/35] fix metagen test --- libs/metagen/metatype.yaml | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 libs/metagen/metatype.yaml diff --git a/libs/metagen/metatype.yaml b/libs/metagen/metatype.yaml new file mode 100644 index 0000000000..e69de29bb2 From dbe7e158bad01f232455b1b6bcbc5e4fc7c350a7 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Wed, 12 Jun 2024 10:56:17 +0300 Subject: [PATCH 21/35] pass error stack to the cli --- meta-cli/src/cli/gen.rs | 2 +- meta-cli/src/cli/serialize.rs | 55 +++++---- meta-cli/src/deploy/actors/task/deploy.rs | 8 +- meta-cli/src/deploy/actors/task/serialize.rs | 8 +- meta-cli/src/deploy/actors/task_manager.rs | 37 +++--- typegate/tests/e2e/cli/deploy_test.ts | 24 ++-- .../tests/e2e/self_deploy/self_deploy_test.ts | 63 +++++----- .../__snapshots__/validator_test.ts.snap | 23 +++- .../tests/e2e/typegraph/validator_test.ts | 11 +- typegate/tests/utils/test.ts | 116 ++++++++++-------- typegate/tests/utils/tg_deploy_script.py | 3 + typegraph/node/sdk/src/tg_artifact_upload.ts | 12 +- typegraph/node/sdk/src/tg_manage.ts | 6 +- typegraph/python/typegraph/graph/tg_manage.py | 16 ++- typegraph/python/typegraph/graph/typegraph.py | 14 +-- typegraph/python/typegraph/wit.py | 11 +- 16 files changed, 229 insertions(+), 180 deletions(-) diff --git a/meta-cli/src/cli/gen.rs b/meta-cli/src/cli/gen.rs index d47e66a3a5..6afe6ef103 100644 --- a/meta-cli/src/cli/gen.rs +++ b/meta-cli/src/cli/gen.rs @@ -162,7 +162,7 @@ async fn 
load_tg_at( .max_parallel_tasks(1); let report = init.run().await; - let mut tgs = report.into_typegraphs(); + let mut tgs = report.into_typegraphs()?; if tgs.is_empty() { bail!("not typegraphs loaded from path at {path:?}") diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index 04a53f6184..c1361dcf2a 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -4,9 +4,7 @@ use super::{Action, ConfigArgs}; use crate::config::{Config, PathOption}; use crate::deploy::actors::console::ConsoleActor; -use crate::deploy::actors::task::serialize::{ - SerializeAction, SerializeActionGenerator, SerializeError, -}; +use crate::deploy::actors::task::serialize::{SerializeAction, SerializeActionGenerator}; use crate::deploy::actors::task::TaskFinishStatus; use crate::deploy::actors::task_manager::{Report, StopReason, TaskManagerInit, TaskSource}; use crate::interlude::*; @@ -97,7 +95,7 @@ impl Action for Serialize { } // TODO no need to report errors - let tgs = report.into_typegraphs(); + let tgs = report.into_typegraphs()?; if let Some(tg_name) = self.typegraph.as_ref() { if let Some(tg) = tgs.iter().find(|tg| &tg.name().unwrap() == tg_name) { @@ -126,37 +124,42 @@ impl Action for Serialize { pub trait SerializeReportExt { #[allow(clippy::vec_box)] - fn into_typegraphs(self) -> Vec>; + fn into_typegraphs(self) -> Result>>; } impl SerializeReportExt for Report { - fn into_typegraphs(self) -> Vec> { - self.entries - .into_iter() - .flat_map(|entry| match entry.status { - TaskFinishStatus::Finished(results) => results - .into_iter() - .map(|(_, v)| v) - .collect::, SerializeError>>() - .unwrap_or_else(|e| { - tracing::error!( - "serialization failed for typegraph '{}' at {:?}: {}", - e.typegraph, - entry.path, - e.error - ); - vec![] - }), + fn into_typegraphs(self) -> Result>> { + let mut res = vec![]; + for entry in self.entries.into_iter() { + match entry.status { + TaskFinishStatus::Finished(results) => { + for (_, tg) in results.into_iter() { + let tg = tg.map_err(|_e| { + // tracing::error!( + // "serialization failed for typegraph '{}' at {:?}", + // e.typegraph, + // entry.path, + // ); + // for err in e.errors.into_iter() { + // tracing::error!("- {err}"); + // } + ferr!("failed") + })?; + res.push(tg); + } + } TaskFinishStatus::Cancelled => { tracing::error!("serialization cancelled for {:?}", entry.path); - vec![] + return Err(ferr!("cancelled")); } TaskFinishStatus::Error => { tracing::error!("serialization failed for {:?}", entry.path); - vec![] + return Err(ferr!("failed")); } - }) - .collect() + } + } + + Ok(res) } } diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index e200e91fc3..5309437d1b 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -123,7 +123,7 @@ pub struct DeploySuccess { #[derive(Deserialize, Debug)] pub struct DeployError { typegraph: String, - error: String, + errors: Vec, } impl OutputData for DeploySuccess { @@ -279,12 +279,14 @@ impl TaskAction for DeployAction { Err(data) => { ctx.console.error(format!( - "{icon} failed to deploy typegraph {name} from {path}: {err}", + "{icon} failed to deploy typegraph {name} from {path}", icon = "✗".red(), name = data.get_typegraph_name().cyan(), path = self.task_ref.path.display().yellow(), - err = data.error, )); + for error in &data.errors { + ctx.console.error(format!("- {error}", error = error)); + } Ok(None) } } diff --git a/meta-cli/src/deploy/actors/task/serialize.rs 
b/meta-cli/src/deploy/actors/task/serialize.rs index fed674a29a..f9275039b2 100644 --- a/meta-cli/src/deploy/actors/task/serialize.rs +++ b/meta-cli/src/deploy/actors/task/serialize.rs @@ -69,7 +69,7 @@ impl TaskActionGenerator for SerializeActionGenerator { #[derive(Deserialize, Debug)] pub struct SerializeError { pub typegraph: String, - pub error: String, + pub errors: Vec, } impl OutputData for Box { @@ -147,12 +147,14 @@ impl TaskAction for SerializeAction { } Err(output) => { ctx.console.error(format!( - "{icon} failed to serialize typegraph {name} from {path}: {err}", + "{icon} failed to serialize typegraph {name} from {path}", icon = "✗".red(), name = output.get_typegraph_name().cyan(), path = self.task_ref.path.display().yellow(), - err = output.error, )); + for err in output.errors.iter() { + ctx.console.error(format!("- {err}")); + } } } diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 8bda9cfb70..7a4f794873 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -379,27 +379,32 @@ impl Handler> for TaskManager { self.active_tasks.remove(&message.task_ref.path); ctx.address().do_send(NextTask); - let mut next_retry_no = None; - match &message.status { - TaskFinishStatus::Error => { - if message.task_ref.retry_no < self.init_params.max_retry_count { - next_retry_no = Some(message.task_ref.retry_no + 1); - } - } - TaskFinishStatus::Finished(results) => { - // TODO partial retry - if multiple typegraphs in a single file - if results.iter().any(|r| r.1.is_err()) { - next_retry_no = Some(message.task_ref.retry_no + 1); + let next_retry_no: Option = + if message.task_ref.retry_no < self.init_params.max_retry_count { + match &message.status { + TaskFinishStatus::Error => Some(message.task_ref.retry_no + 1), + TaskFinishStatus::Finished(results) => { + // TODO partial retry - if multiple typegraphs in a single file + if results.iter().any(|r| r.1.is_err()) { + Some(message.task_ref.retry_no + 1) + } else { + None + } + } + TaskFinishStatus::Cancelled => None, } - } - _ => {} - }; + } else { + None + }; self.reports .insert(message.task_ref.path.clone(), message.status); - if let Some(_next_retry_no) = next_retry_no { - todo!("not implemented"); + if let Some(next_retry_no) = next_retry_no { + todo!( + "not implemented: retry no {next_retry_no}/{}", + self.init_params.max_retry_count + ); } // TODO check queue?? 
diff --git a/typegate/tests/e2e/cli/deploy_test.ts b/typegate/tests/e2e/cli/deploy_test.ts index 5178dca8a6..886ddec8fb 100644 --- a/typegate/tests/e2e/cli/deploy_test.ts +++ b/typegate/tests/e2e/cli/deploy_test.ts @@ -255,12 +255,12 @@ Meta.test( "have replaced and terminated the previous engine", async () => { await gql` - query { - findManyRecords { - id - } - } - ` + query { + findManyRecords { + id + } + } + ` .expectData({ findManyRecords: [], }) @@ -345,12 +345,12 @@ Meta.test( "succeed have replaced and terminated the previous engine", async () => { await gql` - query { - findManyRecords { - id - } - } - ` + query { + findManyRecords { + id + } + } + ` .expectData({ findManyRecords: [], }) diff --git a/typegate/tests/e2e/self_deploy/self_deploy_test.ts b/typegate/tests/e2e/self_deploy/self_deploy_test.ts index 1c03d834e1..f51e8710c0 100644 --- a/typegate/tests/e2e/self_deploy/self_deploy_test.ts +++ b/typegate/tests/e2e/self_deploy/self_deploy_test.ts @@ -9,39 +9,36 @@ import { join } from "std/path/join.ts"; import { assertEquals, assertExists } from "std/assert/mod.ts"; import * as path from "std/path/mod.ts"; -Meta.test({ - name: "deploy and undeploy typegraph without meta-cli", -}, async (t) => { - const gate = `http://localhost:${t.port}`; - const auth = new BasicAuth("admin", "password"); - const cwdDir = join(testDir, "e2e", "self_deploy"); +Meta.test( + { + name: "deploy and undeploy typegraph without meta-cli", + }, + async (t) => { + const gate = `http://localhost:${t.port}`; + const auth = new BasicAuth("admin", "password"); + const cwdDir = join(testDir, "e2e", "self_deploy"); - const { serialized, typegate: gateResponseAdd } = await tgDeploy(tg, { - baseUrl: gate, - auth, - secrets: {}, - typegraphPath: path.join(cwdDir, "self_deploy.mjs"), - artifactsConfig: { - prismaMigration: { - globalAction: { - create: true, - reset: false, - }, - migrationDir: "prisma-migrations", + const { serialized, response: gateResponseAdd } = await tgDeploy(tg, { + typegate: { url: gate, auth }, + secrets: {}, + typegraphPath: path.join(cwdDir, "self_deploy.mjs"), + migrationsDir: `${cwdDir}/prisma-migrations`, + defaultMigrationAction: { + apply: true, + create: true, + reset: false, }, - dir: cwdDir, - }, - }); - assertExists(serialized, "serialized has a value"); - assertEquals(gateResponseAdd, { - data: { - addTypegraph: { name: "self-deploy", messages: [], migrations: [] }, - }, - }); + }); + assertExists(serialized, "serialized has a value"); + assertEquals(gateResponseAdd, { + name: "self-deploy", + messages: [], + migrations: [], + }); - const { typegate: gateResponseRem } = await tgRemove(tg, { - baseUrl: gate, - auth, - }); - assertEquals(gateResponseRem, { data: { removeTypegraphs: true } }); -}); + const { typegate: gateResponseRem } = await tgRemove(tg, { + typegate: { url: gate, auth }, + }); + assertEquals(gateResponseRem, { data: { removeTypegraphs: true } }); + }, +); diff --git a/typegate/tests/e2e/typegraph/__snapshots__/validator_test.ts.snap b/typegate/tests/e2e/typegraph/__snapshots__/validator_test.ts.snap index df0f742942..ca01d345da 100644 --- a/typegate/tests/e2e/typegraph/__snapshots__/validator_test.ts.snap +++ b/typegate/tests/e2e/typegraph/__snapshots__/validator_test.ts.snap @@ -1,8 +1,27 @@ export const snapshot = {}; snapshot[`typegraph validation 1`] = ` -\`Error: - 0: SDK serialization error: Error(stack=['at validator:/test/[in]/a: Expected number got \\\\'"1"\\\\'', 'at validator:/test/[in]/b: Expected a string, got \\\\'["h","e","l","l","o"]\\\\'', 
'at validator:/test/[in]/c: Expected a minimum value of 2, got 0', 'at validator:/test/[in]/d: Expected a maximun length of 4, got "hello" (len=5)', 'at validator:/test/[in]/e: Required field "a" not found in object \\\\'{}\\\\'', 'at validator:/test/[in]/f: Required field "a" not found in object \\\\'{"b":1}\\\\'', 'at validator:/test/[in]/g: Unexpected fields "b" in object "{\\\\\\\\"a\\\\\\\\":2,\\\\\\\\"b\\\\\\\\":1}"', 'at validator:/testEnums/[in]/a: Expected a minimum length of 4, got "hi" (len=2)', "at validator:/testEnums/[in]/a: Expected a string, got '12'", 'at validator:/testEnums/[in]/b: Expected float got \\\\'"13"\\\\'', 'at validator:/testEnums/[out]/a: Expected a minimum length of 4, got "hi" (len=2)', "at validator:/testEnums/[out]/a: Expected a string, got '12'", 'at validator:/testEnums/[out]/b: Expected float got \\\\'"13"\\\\'', 'at validator:/testFromParent/[out]/nested/[in]/a: from_parent injection: Type mismatch: integer to string', 'at validator:/testFromParent/[out]/nested/[in]/b: from_parent injection: minimum_length is not defined in the subtype', 'at validator:/testFromParent/[out]/nested/[in]/b: from_parent injection: maximum_length is greater than the maximum_length of the supertype', 'at validator:/testFromParent/[out]/nested/[in]/c: from_parent injection: property b is not defined in the supertype', 'Typegraph validator failed validation']) +\`ERROR meta::deploy::actors::console: ✗ failed to serialize typegraph validator from validator.py +ERROR meta::deploy::actors::console: - at validator:/test/[in]/a: Expected number got '"1"' +ERROR meta::deploy::actors::console: - at validator:/test/[in]/b: Expected a string, got '["h","e","l","l","o"]' +ERROR meta::deploy::actors::console: - at validator:/test/[in]/c: Expected a minimum value of 2, got 0 +ERROR meta::deploy::actors::console: - at validator:/test/[in]/d: Expected a maximun length of 4, got "hello" (len=5) +ERROR meta::deploy::actors::console: - at validator:/test/[in]/e: Required field "a" not found in object '{}' +ERROR meta::deploy::actors::console: - at validator:/test/[in]/f: Required field "a" not found in object '{"b":1}' +ERROR meta::deploy::actors::console: - at validator:/test/[in]/g: Unexpected fields "b" in object "{\\\\"a\\\\":2,\\\\"b\\\\":1}" +ERROR meta::deploy::actors::console: - at validator:/testEnums/[in]/a: Expected a minimum length of 4, got "hi" (len=2) +ERROR meta::deploy::actors::console: - at validator:/testEnums/[in]/a: Expected a string, got '12' +ERROR meta::deploy::actors::console: - at validator:/testEnums/[in]/b: Expected float got '"13"' +ERROR meta::deploy::actors::console: - at validator:/testEnums/[out]/a: Expected a minimum length of 4, got "hi" (len=2) +ERROR meta::deploy::actors::console: - at validator:/testEnums/[out]/a: Expected a string, got '12' +ERROR meta::deploy::actors::console: - at validator:/testEnums/[out]/b: Expected float got '"13"' +ERROR meta::deploy::actors::console: - at validator:/testFromParent/[out]/nested/[in]/a: from_parent injection: Type mismatch: integer to string +ERROR meta::deploy::actors::console: - at validator:/testFromParent/[out]/nested/[in]/b: from_parent injection: minimum_length is not defined in the subtype +ERROR meta::deploy::actors::console: - at validator:/testFromParent/[out]/nested/[in]/b: from_parent injection: maximum_length is greater than the maximum_length of the supertype +ERROR meta::deploy::actors::console: - at validator:/testFromParent/[out]/nested/[in]/c: from_parent injection: property b is not defined in 
the supertype +ERROR meta::deploy::actors::console: - Typegraph validator failed validation +Error: + 0: failed Backtrace omitted. Run with RUST_BACKTRACE=1 environment variable to display it. Run with RUST_BACKTRACE=full to include source snippets. diff --git a/typegate/tests/e2e/typegraph/validator_test.ts b/typegate/tests/e2e/typegraph/validator_test.ts index 266776fdeb..b43dad53cb 100644 --- a/typegate/tests/e2e/typegraph/validator_test.ts +++ b/typegate/tests/e2e/typegraph/validator_test.ts @@ -15,11 +15,11 @@ Meta.test("typegraph validation", async (t) => { await m.cli( { env: { - "RUST_LOG": "error", - "RUST_BACKTRACE": "0", - "RUST_SPANTRACE": "0", - "RUST_LIB_BACKTRACE": "0", - "RUST_ERR_LOCATION": "0", + RUST_LOG: "error", + RUST_BACKTRACE: "0", + RUST_SPANTRACE: "0", + RUST_LIB_BACKTRACE: "0", + RUST_ERR_LOCATION: "0", }, }, "serialize", @@ -28,6 +28,7 @@ Meta.test("typegraph validation", async (t) => { ); fail("should have thrown"); } catch (e) { + console.log(e); await t.assertSnapshot(e.stderr); } }, diff --git a/typegate/tests/utils/test.ts b/typegate/tests/utils/test.ts index e4bc7d2495..4f3f5347a4 100644 --- a/typegate/tests/utils/test.ts +++ b/typegate/tests/utils/test.ts @@ -60,23 +60,26 @@ interface ServeResult extends AsyncDisposable { function serve(typegates: TypegateManager): Promise { return new Promise((resolve) => { - const server = Deno.serve({ - port: 0, - onListen: ({ port }) => { - resolve({ - port, - async [Symbol.asyncDispose]() { - await server.shutdown(); - }, + const server = Deno.serve( + { + port: 0, + onListen: ({ port }) => { + resolve({ + port, + async [Symbol.asyncDispose]() { + await server.shutdown(); + }, + }); + }, + }, + (req) => { + return typegates.next().handle(req, { + hostname: "localhost", + port: 0, + transport: "tcp", }); }, - }, (req) => { - return typegates.next().handle(req, { - hostname: "localhost", - port: 0, - transport: "tcp", - }); - }); + ); }); } @@ -120,8 +123,7 @@ export class MetaTest { private introspection: boolean, public port: number, public disposables: AsyncDisposableStack, - ) { - } + ) {} async [Symbol.asyncDispose]() { if (this.#disposed) return; @@ -142,7 +144,12 @@ export class MetaTest { } getTypegraphEngine(name: string): QueryEngine | undefined { - return this.typegates.next().register.get(name); + const register = this.typegates.next().register; + // console.log( + // "available typegraphs", + // register.list().map((e) => e.name), + // ); + return register.get(name); } async serialize(path: string, opts: ParseOptions = {}): Promise { @@ -189,8 +196,11 @@ export class MetaTest { secrets: Record, ): Promise { const tg = await TypeGraph.parseJson(tgString); - const { engine, response } = await this.typegate - .pushTypegraph(tg, secrets, this.introspection); + const { engine, response } = await this.typegate.pushTypegraph( + tg, + secrets, + this.introspection, + ); if (engine == null) { throw response.failure!; @@ -236,13 +246,13 @@ export class MetaTest { cwd: string, opts: ParseOptions, ): Promise { - let output; const secrets = opts.secrets ?? 
{}; const secretsStr = JSON.stringify(secrets); + const cmd = [lang.toString()]; + if (lang === SDKLangugage.TypeScript) { - const cmd = [ - lang.toString(), + cmd.push( "run", "--allow-all", "utils/tg_deploy_script.ts", @@ -250,26 +260,28 @@ export class MetaTest { this.port.toString(), path, secretsStr, - ]; - if (opts.typegraph) { - cmd.push(opts.typegraph); - } - output = await this.shell(cmd); + ); } else { - const cmd = [ - lang.toString(), + cmd.push( "utils/tg_deploy_script.py", cwd, this.port.toString(), path, secretsStr, - ]; - if (opts.typegraph) { - cmd.push(opts.typegraph); - } - output = await this.shell(cmd); + ); + } + + if (opts.typegraph) { + cmd.push(opts.typegraph); + } + + const env: Record = {}; + if (opts.prefix) { + env["PREFIX"] = opts.prefix; } + const output = await this.shell(cmd, { env }); + const { stderr, stdout, code } = output; if (code !== 0) { @@ -428,21 +440,20 @@ export const test = ((o, fn): void => { } const tempDirs = await Promise.all( - Array.from({ length: replicas }).map( - async (_) => { - const uuid = crypto.randomUUID(); - return await Deno.makeTempDir({ - prefix: `typegate-test-${uuid}`, - dir: config.tmp_dir, - }); - }, - ), + Array.from({ length: replicas }).map(async (_) => { + const uuid = crypto.randomUUID(); + return await Deno.makeTempDir({ + prefix: `typegate-test-${uuid}`, + dir: config.tmp_dir, + }); + }), ); // TODO different tempDir for each typegate instance const result = await Promise.allSettled( - Array.from({ length: replicas }).map(async (_, index) => - await Typegate.init(opts.syncConfig ?? null, null, tempDirs[index]) + Array.from({ length: replicas }).map( + async (_, index) => + await Typegate.init(opts.syncConfig ?? null, null, tempDirs[index]), ), ); const typegates = result.map((r) => { @@ -453,10 +464,7 @@ export const test = ((o, fn): void => { } }); - const { - gitRepo = null, - introspection = false, - } = opts; + const { gitRepo = null, introspection = false } = opts; await Promise.all( typegates.map((typegate) => SystemTypegraph.loadAll(typegate)), ); @@ -468,11 +476,11 @@ export const test = ((o, fn): void => { ); mt.disposables.defer(async () => { - await Promise.all(tempDirs.map( - async (tempDir, _) => { + await Promise.all( + tempDirs.map(async (tempDir, _) => { await Deno.remove(tempDir, { recursive: true }); - }, - )); + }), + ); }); if (opts.teardown != null) { diff --git a/typegate/tests/utils/tg_deploy_script.py b/typegate/tests/utils/tg_deploy_script.py index bbf698b589..ebaa20c3af 100644 --- a/typegate/tests/utils/tg_deploy_script.py +++ b/typegate/tests/utils/tg_deploy_script.py @@ -57,12 +57,15 @@ if global_action_reset is not True: global_action_create = global_action_create == "true" +prefix = os.environ.get("PREFIX") + deploy_result = tg_deploy( tg, TypegraphDeployParams( typegate=TypegateConnectionOptions(url=gate, auth=auth), typegraph_path=os.path.join(cwd, module_name), + prefix=prefix, secrets=secrets, migrations_dir=migration_dir, migration_actions=None, diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index 8cccd8e330..9752960d15 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -98,18 +98,8 @@ export class ArtifactUploader { `failed to upload artifact ${meta.relativePath}`, ); - if (!res.ok) { - const err = await res.json(); - // To be read by the CLI? 
- log.error("Failed to upload artifact", meta.relativePath, err); - console.log(err); - throw new Error( - `Failed to upload artifact '${path}' (${res.status}): ${err.error}`, - ); - } - const ret = res.json(); log.info("✓ artifact uploaded:", meta.relativePath); - return ret; + return res; } private getMetas(artifacts: Artifact[]): UploadArtifactMeta[] { diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index f7cc9181c7..13e023d42d 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -56,7 +56,7 @@ export class Manager { } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, - error: err?.message ?? "failed to serialize typegraph", + errors: err?.stack ?? [err?.message ?? "failed to serialize typegraph"], }); } } @@ -88,7 +88,7 @@ export class Manager { } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, - error: err?.message ?? "failed to serialize typegraph", + errors: err?.stack ?? [err?.message ?? "failed to serialize typegraph"], }); return; } @@ -119,7 +119,7 @@ export class Manager { } catch (err: any) { log.failure({ typegraph: this.#typegraph.name, - error: err?.message ?? "failed to deploy typegraph", + errors: err?.stack ?? [err?.message ?? "failed to deploy typegraph"], }); return; } diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index 6ae1eebd61..12bf4952bf 100644 --- a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -15,6 +15,7 @@ TypegraphDeployParams, tg_deploy, ) +from typegraph.wit import ErrorStack from typegraph.utils import freeze_tg_output from typegraph.io import Log, Rpc from typegraph.envs.cli import CliEnv, Command, get_cli_env @@ -63,7 +64,10 @@ def serialize(self): Log.success(res.tgJson, noencode=True) except Exception as err: Log.debug(traceback.format_exc()) - Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + if isinstance(err, ErrorStack): + Log.failure({"typegraph": self.typegraph.name, "errors": err.stack}) + else: + Log.failure({"typegraph": self.typegraph.name, "errors": [str(err)]}) def deploy(self): env = self.env @@ -88,7 +92,10 @@ def deploy(self): frozen_out.serialize(params) except Exception as err: Log.debug(traceback.format_exc()) - Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + if isinstance(err, ErrorStack): + Log.failure({"typegraph": self.typegraph.name, "errors": err.stack}) + else: + Log.failure({"typegraph": self.typegraph.name, "errors": [str(err)]}) return try: @@ -115,5 +122,8 @@ def deploy(self): Log.success({"typegraph": self.typegraph.name, **response}) except Exception as err: Log.debug(traceback.format_exc()) - Log.failure({"typegraph": self.typegraph.name, "error": str(err)}) + if isinstance(err, ErrorStack): + Log.failure({"typegraph": self.typegraph.name, "errors": err.stack}) + else: + Log.failure({"typegraph": self.typegraph.name, "errors": [str(err)]}) return diff --git a/typegraph/python/typegraph/graph/typegraph.py b/typegraph/python/typegraph/graph/typegraph.py index 1cf18d4a7e..6ed7e5ffee 100644 --- a/typegraph/python/typegraph/graph/typegraph.py +++ b/typegraph/python/typegraph/graph/typegraph.py @@ -20,7 +20,7 @@ from typegraph.graph.shared_types import FinalizationResult, TypegraphOutput from typegraph.policy import Policy, PolicyPerEffect, PolicySpec, get_policy_chain from typegraph.envs.cli import CLI_ENV -from typegraph.wit import core, store, wit_utils +from 
typegraph.wit import ErrorStack, core, store, wit_utils from typegraph.io import Log if TYPE_CHECKING: @@ -87,7 +87,7 @@ def expose( ) if isinstance(res, Err): - raise Exception(res.value) + raise ErrorStack(res.value) @dataclass @@ -134,7 +134,7 @@ def inherit(self): def rest(self, graphql: str) -> int: res = wit_utils.add_graphql_endpoint(store, graphql) if isinstance(res, Err): - raise Exception(res.value) + raise ErrorStack(res.value) return res.value def auth(self, value: Union[Auth, RawAuth]): @@ -144,7 +144,7 @@ def auth(self, value: Union[Auth, RawAuth]): else wit_utils.add_auth(store, value) ) if isinstance(res, Err): - raise Exception(res.value) + raise ErrorStack(res.value) return res.value def ref(self, name: str) -> "t.typedef": @@ -153,7 +153,7 @@ def ref(self, name: str) -> "t.typedef": def configure_random_injection(self, seed: int): res = core.set_seed(store, seed) if isinstance(res, Err): - raise Exception(res.value) + raise ErrorStack(res.value) def as_arg(self, name: Optional[str] = None): return ApplyFromArg(name) @@ -240,7 +240,7 @@ def serialize_with_artifacts( ): finalization_result = core.serialize_typegraph(store, config) if isinstance(finalization_result, Err): - raise Exception(finalization_result.value) + raise ErrorStack(finalization_result.value) tg_json, ref_artifacts = finalization_result.value return FinalizationResult(tg_json, ref_artifacts) @@ -262,7 +262,7 @@ def serialize_with_artifacts( def gen_ref(name: str) -> "t.typedef": res = core.refb(store, name, []) if isinstance(res, Err): - raise Exception(res.value) + raise ErrorStack(res.value) from typegraph.t import typedef return typedef(res.value) diff --git a/typegraph/python/typegraph/wit.py b/typegraph/python/typegraph/wit.py index 6db8152810..99dcc517ae 100644 --- a/typegraph/python/typegraph/wit.py +++ b/typegraph/python/typegraph/wit.py @@ -2,10 +2,11 @@ # SPDX-License-Identifier: MPL-2.0 from wasmtime import Store +from typing import List from typegraph.gen import Root, RootImports from typegraph.gen.exports.aws import Aws -from typegraph.gen.exports.core import Core +from typegraph.gen.exports.core import Core, Error from typegraph.gen.exports.runtimes import Runtimes from typegraph.gen.exports.utils import Utils from typegraph.host.host import HostImpl @@ -24,3 +25,11 @@ runtimes = Runtimes(_typegraph_core) aws = Aws(_typegraph_core) wit_utils = Utils(_typegraph_core) + + +class ErrorStack(Exception): + stack: List[str] + + def __init__(self, err: Error): + super(ErrorStack, self).__init__("\n".join(f"- {msg}" for msg in err.stack)) + self.stack = err.stack From 6ffb4cbc315f93a630f454b76b407dd424a077f3 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Wed, 12 Jun 2024 11:23:15 +0300 Subject: [PATCH 22/35] fix injection test --- typegate/tests/injection/injection_test.ts | 166 +++++++++++---------- typegate/tests/utils/tg_deploy_script.ts | 23 +-- 2 files changed, 93 insertions(+), 96 deletions(-) diff --git a/typegate/tests/injection/injection_test.ts b/typegate/tests/injection/injection_test.ts index b2696a8837..458c3b53e6 100644 --- a/typegate/tests/injection/injection_test.ts +++ b/typegate/tests/injection/injection_test.ts @@ -37,11 +37,11 @@ Meta.test("Injected values", async (t) => { await t.should("fail for missing context", async () => { await gql` - query { - test(a: 1) { - raw_int + query { + test(a: 1) { + raw_int + } } - } ` .expectErrorContains("'userId' not found at ``") .on(e); @@ -102,7 +102,9 @@ Meta.test("Injected values", async (t) => { await t.should("inject the right value 
matching the effect", async () => { await gql` query { - effect_none { operation } + effect_none { + operation + } } ` .expectData({ @@ -111,9 +113,15 @@ Meta.test("Injected values", async (t) => { .on(e); await gql` mutation { - effect_create { operation } - effect_delete { operation } - effect_update { operation } + effect_create { + operation + } + effect_delete { + operation + } + effect_update { + operation + } } ` .expectData({ @@ -211,7 +219,10 @@ Meta.test("Injection from/into graphql", async (t) => { name email messagesSent { - id text senderId recipientId + id + text + senderId + recipientId } } } @@ -239,7 +250,8 @@ Meta.test("dynamic value injection", async (t) => { test(a: 12) { date } - }` + } + ` .withContext({ userId: "123", }) @@ -262,13 +274,15 @@ Meta.test("Deno: value injection", async (t) => { await t.should("work", async () => { await gql` query { - test(input: {a: 12}) { + test(input: { a: 12 }) { input { a context optional_context raw_int - raw_obj { in } + raw_obj { + in + } alt_raw alt_secret alt_context_opt @@ -276,7 +290,8 @@ Meta.test("Deno: value injection", async (t) => { date } } - }` + } + ` .withContext({ userId: "123", }) @@ -324,12 +339,12 @@ Meta.test("Injection from nested context", async (t) => { "access injected nested context with array index", async () => { await gql` - query { - secondProfileData { - second + query { + secondProfileData { + second + } } - } - ` + ` .withContext({ profile: { data: [1234, 5678], @@ -344,71 +359,62 @@ Meta.test("Injection from nested context", async (t) => { }, ); - await t.should( - "access injected nested context with custom key", - async () => { - await gql` - query { - customKey { - custom - } + await t.should("access injected nested context with custom key", async () => { + await gql` + query { + customKey { + custom } - ` - .withContext({ - profile: { - "custom key": 123, - }, - }) - .expectData({ - customKey: { - custom: 123, - }, - }) - .on(e); - }, - ); + } + ` + .withContext({ + profile: { + "custom key": 123, + }, + }) + .expectData({ + customKey: { + custom: 123, + }, + }) + .on(e); + }); - await t.should( - "fail for invalid context", - async () => { - await gql` - query { - secondProfileData { - second - } + await t.should("fail for invalid context", async () => { + await gql` + query { + secondProfileData { + second } - ` - .withContext({ - profile: { - "invalid key": 123, - }, - }) - .expectErrorContains("Property 'data' not found at `.profile`") - .on(e); - }, - ); + } + ` + .withContext({ + profile: { + "invalid key": 123, + }, + }) + .expectErrorContains("Property 'data' not found at `.profile`") + .on(e); + }); - await t.should( - "work with missing context on optional type", - async () => { - await gql` - query { - optional { - optional - } + await t.should("work with missing context on optional type", async () => { + await gql` + query { + optional { + optional } - ` - .withContext({ - profile: { - id: 1234, - }, - }) - .expectData({ - optional: { - optional: null, - }, - }) - .on(e); - }, - ); + } + ` + .withContext({ + profile: { + id: 1234, + }, + }) + .expectData({ + optional: { + optional: null, + }, + }) + .on(e); + }); }); diff --git a/typegate/tests/utils/tg_deploy_script.ts b/typegate/tests/utils/tg_deploy_script.ts index a98cc80047..78cc1a293c 100644 --- a/typegate/tests/utils/tg_deploy_script.ts +++ b/typegate/tests/utils/tg_deploy_script.ts @@ -36,8 +36,6 @@ if (typeof tg === "function") { const secrets = JSON.parse(secretsStr); -const disableArtRes = Deno.env.get("DISABLE_ART_RES"); 
-const codegen = Deno.env.get("CODEGEN"); const migrationDir = Deno.env.get("MIGRATION_DIR") ?? "prisma-migrations"; let globalActionReset = Deno.env.get("GLOBAL_ACTION_RESET") ?? false; if (globalActionReset !== false) { @@ -49,22 +47,15 @@ if (globalActionCreate !== true) { } const { serialized, typegate: _gateResponseAdd } = await tgDeploy(tg, { - baseUrl: gate, - auth, - artifactsConfig: { - disableArtifactResolution: disableArtRes, - codegen, - prismaMigration: { - globalAction: { - create: globalActionCreate, - reset: globalActionReset, - }, - migrationDir: migrationDir, - }, - dir: cwd, - }, + typegate: { url: gate, auth }, typegraphPath: tgPath, + prefix: Deno.env.get("PREFIX") ?? undefined, secrets: secrets, + migrationsDir: `${cwd}/${migrationDir}`, + defaultMigrationAction: { + create: globalActionCreate, + reset: globalActionReset, + }, }); console.log(serialized); From e84a37e9c648998cf018f368ab5b8b4603ec6cb7 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Wed, 12 Jun 2024 21:03:37 +0300 Subject: [PATCH 23/35] fix template_test --- dev/lock.yml | 4 +- examples/metatype.yaml | 2 + examples/templates/deno/api/example.ts | 6 +- examples/templates/node/package.json | 2 +- examples/templates/node/pnpm-lock.yaml | 315 ------------------ meta-cli/src/cli/deploy.rs | 4 +- meta-cli/src/deploy/actors/task_manager.rs | 30 +- .../tests/e2e/templates/templates_test.ts | 137 +++----- 8 files changed, 78 insertions(+), 422 deletions(-) delete mode 100644 examples/templates/node/pnpm-lock.yaml diff --git a/dev/lock.yml b/dev/lock.yml index 463049ab63..f5e4cf2b91 100644 --- a/dev/lock.yml +++ b/dev/lock.yml @@ -78,9 +78,9 @@ dev: examples/**/pyproject.toml: (typegraph = ").+("): METATYPE_VERSION examples/**/package.json: - '(\s*"@typegraph\/sdk"\s*:\s*"\^).+(",?)': PUBLISHED_VERSION + '(\s*"@typegraph\/sdk"\s*:\s*"\^).+(",?)': METATYPE_VERSION examples/**/*.ts: - '(import\s+.+\s+from "npm:@typegraph\/sdk@)[^\/]+(\/.+";)': PUBLISHED_VERSION + '(import\s+.+\s+from "npm:@typegraph\/sdk@)[^\/]+(\/.+";)': METATYPE_VERSION typegate/import_map.json: '(\s*"@typegraph\/sdk"\s*:\s*"npm:@typegraph\/sdk@).+(",?)': METATYPE_VERSION '(\s*"@typegraph\/sdk\/"\s*:\s*"npm:/@typegraph\/sdk@).+(\/",?)': METATYPE_VERSION diff --git a/examples/metatype.yaml b/examples/metatype.yaml index bd40431f20..f8299918b5 100644 --- a/examples/metatype.yaml +++ b/examples/metatype.yaml @@ -154,6 +154,8 @@ typegraphs: typescript: exclude: - "typegraphs/temporal.ts" + include: + - "typegraphs/*.ts" metagen: targets: diff --git a/examples/templates/deno/api/example.ts b/examples/templates/deno/api/example.ts index f5086a79b3..040f75fed3 100644 --- a/examples/templates/deno/api/example.ts +++ b/examples/templates/deno/api/example.ts @@ -1,6 +1,6 @@ -import { Policy, t, typegraph } from "npm:@typegraph/sdk@0.4.2/index.js"; -import { PythonRuntime } from "npm:@typegraph/sdk@0.4.2/runtimes/python.js"; -import { DenoRuntime } from "npm:@typegraph/sdk@0.4.2/runtimes/deno.js"; +import { Policy, t, typegraph } from "npm:@typegraph/sdk@0.4.3-0/index.js"; +import { PythonRuntime } from "npm:@typegraph/sdk@0.4.3-0/runtimes/python.js"; +import { DenoRuntime } from "npm:@typegraph/sdk@0.4.3-0/runtimes/deno.js"; await typegraph("example", (g) => { const pub = Policy.public(); diff --git a/examples/templates/node/package.json b/examples/templates/node/package.json index 4507e83e7c..dec6e076de 100644 --- a/examples/templates/node/package.json +++ b/examples/templates/node/package.json @@ -6,7 +6,7 @@ "dev": "MCLI_LOADER_CMD='npm x tsx' meta dev" 
}, "dependencies": { - "@typegraph/sdk": "^0.4.2" + "@typegraph/sdk": "^0.4.3-0" }, "devDependencies": { "tsx": "^3.13.0", diff --git a/examples/templates/node/pnpm-lock.yaml b/examples/templates/node/pnpm-lock.yaml deleted file mode 100644 index 719e5dc08f..0000000000 --- a/examples/templates/node/pnpm-lock.yaml +++ /dev/null @@ -1,315 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - dependencies: - '@typegraph/sdk': - specifier: ^0.4.2 - version: 0.4.2 - devDependencies: - tsx: - specifier: ^3.13.0 - version: 3.14.0 - typescript: - specifier: ^5.2.2 - version: 5.4.5 - -packages: - - '@esbuild/android-arm64@0.18.20': - resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} - engines: {node: '>=12'} - cpu: [arm64] - os: [android] - - '@esbuild/android-arm@0.18.20': - resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} - engines: {node: '>=12'} - cpu: [arm] - os: [android] - - '@esbuild/android-x64@0.18.20': - resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - - '@esbuild/darwin-arm64@0.18.20': - resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - - '@esbuild/darwin-x64@0.18.20': - resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - - '@esbuild/freebsd-arm64@0.18.20': - resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - - '@esbuild/freebsd-x64@0.18.20': - resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - - '@esbuild/linux-arm64@0.18.20': - resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - - '@esbuild/linux-arm@0.18.20': - resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - - '@esbuild/linux-ia32@0.18.20': - resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - - '@esbuild/linux-loong64@0.18.20': - resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - - '@esbuild/linux-mips64el@0.18.20': - resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - - '@esbuild/linux-ppc64@0.18.20': - resolution: {integrity: sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - - '@esbuild/linux-riscv64@0.18.20': - resolution: {integrity: 
sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - - '@esbuild/linux-s390x@0.18.20': - resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - - '@esbuild/linux-x64@0.18.20': - resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} - engines: {node: '>=12'} - cpu: [x64] - os: [linux] - - '@esbuild/netbsd-x64@0.18.20': - resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - - '@esbuild/openbsd-x64@0.18.20': - resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - - '@esbuild/sunos-x64@0.18.20': - resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - - '@esbuild/win32-arm64@0.18.20': - resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - - '@esbuild/win32-ia32@0.18.20': - resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - - '@esbuild/win32-x64@0.18.20': - resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - - '@typegraph/sdk@0.4.2': - resolution: {integrity: sha512-6IDm7V6XyibTJXhH3bhz7W7QYYkYFVTj/ycMOem/Cq9lQ4WN6pHO3yTzfQsldZci1A6U9JHKVFaunjhgNyo1eA==} - - buffer-from@1.1.2: - resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - - esbuild@0.18.20: - resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} - engines: {node: '>=12'} - hasBin: true - - fsevents@2.3.3: - resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - - get-tsconfig@4.7.5: - resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} - - resolve-pkg-maps@1.0.0: - resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} - - source-map-support@0.5.21: - resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} - - source-map@0.6.1: - resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} - engines: {node: '>=0.10.0'} - - tsx@3.14.0: - resolution: {integrity: sha512-xHtFaKtHxM9LOklMmJdI3BEnQq/D5F73Of2E1GDrITi9sgoVkvIsrQUTY1G8FlmGtA+awCI4EBlTRRYxkL2sRg==} - hasBin: true - - typescript@5.4.5: - resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} - engines: {node: '>=14.17'} - hasBin: true - -snapshots: - - 
'@esbuild/android-arm64@0.18.20': - optional: true - - '@esbuild/android-arm@0.18.20': - optional: true - - '@esbuild/android-x64@0.18.20': - optional: true - - '@esbuild/darwin-arm64@0.18.20': - optional: true - - '@esbuild/darwin-x64@0.18.20': - optional: true - - '@esbuild/freebsd-arm64@0.18.20': - optional: true - - '@esbuild/freebsd-x64@0.18.20': - optional: true - - '@esbuild/linux-arm64@0.18.20': - optional: true - - '@esbuild/linux-arm@0.18.20': - optional: true - - '@esbuild/linux-ia32@0.18.20': - optional: true - - '@esbuild/linux-loong64@0.18.20': - optional: true - - '@esbuild/linux-mips64el@0.18.20': - optional: true - - '@esbuild/linux-ppc64@0.18.20': - optional: true - - '@esbuild/linux-riscv64@0.18.20': - optional: true - - '@esbuild/linux-s390x@0.18.20': - optional: true - - '@esbuild/linux-x64@0.18.20': - optional: true - - '@esbuild/netbsd-x64@0.18.20': - optional: true - - '@esbuild/openbsd-x64@0.18.20': - optional: true - - '@esbuild/sunos-x64@0.18.20': - optional: true - - '@esbuild/win32-arm64@0.18.20': - optional: true - - '@esbuild/win32-ia32@0.18.20': - optional: true - - '@esbuild/win32-x64@0.18.20': - optional: true - - '@typegraph/sdk@0.4.2': {} - - buffer-from@1.1.2: {} - - esbuild@0.18.20: - optionalDependencies: - '@esbuild/android-arm': 0.18.20 - '@esbuild/android-arm64': 0.18.20 - '@esbuild/android-x64': 0.18.20 - '@esbuild/darwin-arm64': 0.18.20 - '@esbuild/darwin-x64': 0.18.20 - '@esbuild/freebsd-arm64': 0.18.20 - '@esbuild/freebsd-x64': 0.18.20 - '@esbuild/linux-arm': 0.18.20 - '@esbuild/linux-arm64': 0.18.20 - '@esbuild/linux-ia32': 0.18.20 - '@esbuild/linux-loong64': 0.18.20 - '@esbuild/linux-mips64el': 0.18.20 - '@esbuild/linux-ppc64': 0.18.20 - '@esbuild/linux-riscv64': 0.18.20 - '@esbuild/linux-s390x': 0.18.20 - '@esbuild/linux-x64': 0.18.20 - '@esbuild/netbsd-x64': 0.18.20 - '@esbuild/openbsd-x64': 0.18.20 - '@esbuild/sunos-x64': 0.18.20 - '@esbuild/win32-arm64': 0.18.20 - '@esbuild/win32-ia32': 0.18.20 - '@esbuild/win32-x64': 0.18.20 - - fsevents@2.3.3: - optional: true - - get-tsconfig@4.7.5: - dependencies: - resolve-pkg-maps: 1.0.0 - - resolve-pkg-maps@1.0.0: {} - - source-map-support@0.5.21: - dependencies: - buffer-from: 1.1.2 - source-map: 0.6.1 - - source-map@0.6.1: {} - - tsx@3.14.0: - dependencies: - esbuild: 0.18.20 - get-tsconfig: 4.7.5 - source-map-support: 0.5.21 - optionalDependencies: - fsevents: 2.3.3 - - typescript@5.4.5: {} diff --git a/meta-cli/src/cli/deploy.rs b/meta-cli/src/cli/deploy.rs index 42c98461d7..35ee57470d 100644 --- a/meta-cli/src/cli/deploy.rs +++ b/meta-cli/src/cli/deploy.rs @@ -214,7 +214,7 @@ mod default_mode { TaskSource::Discovery(deploy.base_dir) }, ) - .max_retry_count(3); + .retry(3, None); if let Some(max_parallel_loads) = deploy.max_parallel_loads { init = init.max_parallel_tasks(max_parallel_loads); @@ -289,7 +289,7 @@ mod watch_mode { console.clone(), TaskSource::DiscoveryAndWatch(deploy.base_dir), ) - .max_retry_count(3); + .retry(3, None); if let Some(max_parallel_loads) = deploy.max_parallel_loads { init = init.max_parallel_tasks(max_parallel_loads); diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 7a4f794873..c07889e244 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -1,6 +1,5 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
 // SPDX-License-Identifier: MPL-2.0
-
 use super::console::{Console, ConsoleActor};
 use super::discovery::DiscoveryActor;
 use super::task::action::{TaskAction, TaskActionGenerator};
@@ -11,6 +10,7 @@ use futures::channel::oneshot;
 use indexmap::IndexMap;
 use std::collections::VecDeque;
 use std::sync::atomic::{AtomicUsize, Ordering};
+use std::time::Duration;
 
 pub mod report;
 pub use report::Report;
@@ -115,11 +115,14 @@ pub struct TaskManager {
     console: Addr<ConsoleActor>,
 }
 
+const DEFAULT_INITIAL_RETRY_INTERVAL: Duration = Duration::from_secs(3);
+
 pub struct TaskManagerInit<A: TaskAction> {
     config: Arc<Config>,
     action_generator: A::Generator,
     max_parallel_tasks: usize,
     max_retry_count: usize,
+    initial_retry_interval: Duration,
     console: Addr<ConsoleActor>,
     task_source: TaskSource,
 }
@@ -136,6 +139,7 @@ impl TaskManagerInit {
             action_generator,
             max_parallel_tasks: num_cpus::get(),
             max_retry_count: 0,
+            initial_retry_interval: DEFAULT_INITIAL_RETRY_INTERVAL,
             console,
             task_source,
         }
@@ -146,8 +150,9 @@ impl TaskManagerInit {
         self
     }
 
-    pub fn max_retry_count(mut self, max_retry_count: usize) -> Self {
+    pub fn retry(mut self, max_retry_count: usize, initial_interval: Option<Duration>) -> Self {
         self.max_retry_count = max_retry_count;
+        self.initial_retry_interval = initial_interval.unwrap_or(DEFAULT_INITIAL_RETRY_INTERVAL);
         self
     }
 
@@ -400,11 +405,22 @@ impl Handler> for TaskManager {
         self.reports
             .insert(message.task_ref.path.clone(), message.status);
 
-        if let Some(next_retry_no) = next_retry_no {
-            todo!(
-                "not implemented: retry no {next_retry_no}/{}",
-                self.init_params.max_retry_count
-            );
+        if let Some(retry_no) = next_retry_no {
+            let path = message.task_ref.path;
+            let task_ref = self.task_generator.generate(path.clone(), retry_no);
+            let task_manager = ctx.address();
+            self.pending_retries.insert(path.clone(), task_ref.id);
+
+            let retry_interval = self.init_params.initial_retry_interval * (retry_no as u32);
+
+            let fut = async move {
+                tokio::time::sleep(retry_interval).await;
+                task_manager.do_send(AddTask {
+                    task_ref,
+                    reason: TaskReason::Retry(retry_no),
+                });
+            };
+            ctx.spawn(fut.in_current_span().into_actor(self));
         }
 
         // TODO check queue??
diff --git a/typegate/tests/e2e/templates/templates_test.ts b/typegate/tests/e2e/templates/templates_test.ts index e6d2fa2b17..a60fb1cc97 100644 --- a/typegate/tests/e2e/templates/templates_test.ts +++ b/typegate/tests/e2e/templates/templates_test.ts @@ -6,99 +6,52 @@ import { newTempDir, workspaceDir } from "test-utils/dir.ts"; import { exists, expandGlob } from "std/fs/mod.ts"; import { join } from "std/path/mod.ts"; import { assert } from "std/assert/mod.ts"; -import { projectDir } from "../../../../dev/utils.ts"; -// import { shell } from "test-utils/shell.ts"; +import { shell } from "test-utils/shell.ts"; -const modifiers: Record Promise | void> = { - "python": () => {}, - "deno": async (dir: string) => { - for await (const f of expandGlob("**/*.ts", { root: dir })) { - // FIXME: deno is unable to map the types from .d.ts files when used locally - const data = (await Deno.readTextFile(f.path)).replace( - /\(\s*g\s*\)/, - "(g: any)", - ); - const level = f.path.replace(projectDir, "").split("/").length - 2; - const newData = data.replaceAll( - /npm:@typegraph\/sdk@[0-9]+\.[0-9]+\.[0-9]+/g, - `${Array(level).fill("..").join("/")}/typegraph/node/sdk/dist`, - ); - - await Deno.writeTextFile(f.path, newData); - } +const install = { + python: async (_dir: string) => {}, + deno: async (_dir: string) => {}, + node: async (dir: string) => { + await shell(["pnpm", "install"], { currentDir: dir }); }, - "node": async (dir) => { - // Method 1. the published version from npm is used - - // // Method 2. install local module - // // should work once we have a `node` loader since - // // deno does not support file scheme yet - // // https://github.com/denoland/deno/issues/18474 - // await shell(["pnpm", "i", "../../typegraph/node"], { - // currentDir: dir, - // }); - - // Method 3. 
rewrite imports - // const importMap = JSON.parse( - // await Deno.readTextFile( - // import.meta.resolve("../../../../typegraph/node/sdk/package.json"), - // ), - // ); - for await (const f of expandGlob("**/*.ts", { root: dir })) { - const data = await Deno.readTextFile(f.path); - const newData = data.replace( - /"@typegraph\/sdk\/?(.*)"/g, - (_match, chunk) => { - return `"../../../typegraph/node/sdk/dist/${chunk}"`; - // const importFile = importMap?.exports[chunk]?.import; - // console.log(chunk, "=>", importFile); - // if (importFile) { - // return `"../../../typegraph/node/sdk/${ - // importFile.replace("./", "") - // }"`; - // } - }, - ); - await Deno.writeTextFile( - f.path, - newData, +} as const; + +for (const template of ["python", "deno", "node"] as const) { + Meta.test( + { + name: `${template} template`, + }, + async (t) => { + const dir = await newTempDir(); + + await t.should("should be extracted correctly", async () => { + const out = await Meta.cli("new", "--template", template, dir); + console.log(out.stdout); + const source = join(workspaceDir, "examples/templates", template); + const sourcesFiles = await Array.fromAsync( + expandGlob("**/*", { + root: source, + }), + ); + assert(sourcesFiles.length > 0); + for (const f of sourcesFiles) { + const relPath = f.path.replace(source, ""); + assert(exists(join(dir, relPath))); + } + }); + + await install[template](dir); + // await modifiers[template](dir); + const out = await Meta.cli( + { currentDir: dir }, + "deploy", + "--target", + "dev", + "--gate", + `http://localhost:${t.port}`, + "--allow-dirty", ); - } - }, -}; - -for (const template of ["python", "deno", "node"]) { - Meta.test({ - name: `${template} template`, - }, async (t) => { - const dir = await newTempDir(); - - await t.should("should be extracted correctly", async () => { - const out = await Meta.cli("new", "--template", template, dir); - console.log(out.stdout); - const source = join(workspaceDir, "examples/templates", template); - const sourcesFiles = await Array.fromAsync( - expandGlob("**/*", { - root: source, - }), - ); - assert(sourcesFiles.length > 0); - for (const f of sourcesFiles) { - const relPath = f.path.replace(source, ""); - assert(exists(join(dir, relPath))); - } - }); - - await modifiers[template](dir); - const out = await Meta.cli( - { currentDir: dir }, - "deploy", - "--target", - "dev", - "--gate", - `http://localhost:${t.port}`, - "--allow-dirty", - ); - console.log(out); - }); + console.log(out); + }, + ); } From a40e920500f367df190c17b95b23f80e36694c87 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Thu, 13 Jun 2024 23:21:20 +0300 Subject: [PATCH 24/35] artifact resolution env, path only artifact data in materializers --- libs/common/src/typegraph/runtimes/deno.rs | 6 +- libs/common/src/typegraph/runtimes/python.rs | 9 +- meta-cli/src/cli/gen.rs | 10 +- meta-cli/src/cli/serialize.rs | 1 + meta-cli/src/deploy/actors/task/action.rs | 1 + meta-cli/src/deploy/actors/task/command.rs | 4 + meta-cli/src/deploy/actors/task/deploy.rs | 1 + meta-cli/src/deploy/actors/task/serialize.rs | 8 +- typegate/src/runtimes/deno/deno.ts | 27 +- typegate/src/runtimes/python.ts | 222 +++++++------ typegate/src/typegate/artifacts/mod.ts | 10 +- typegraph/core/src/conversion/runtimes.rs | 10 +- typegraph/core/src/global_store.rs | 23 -- typegraph/core/src/lib.rs | 1 + typegraph/core/src/logger.rs | 63 ++++ typegraph/core/src/typegraph.rs | 5 +- typegraph/core/src/utils/archive.rs | 72 +++++ typegraph/core/src/utils/artifacts.rs | 26 ++ 
typegraph/core/src/utils/fs.rs | 147 +++++++++ typegraph/core/src/utils/fs_host.rs | 296 ------------------ typegraph/core/src/utils/mod.rs | 25 +- typegraph/core/src/utils/pathlib.rs | 24 ++ .../core/src/utils/postprocess/deno_rt.rs | 139 ++------ typegraph/core/src/utils/postprocess/mod.rs | 30 +- .../core/src/utils/postprocess/prisma_rt.rs | 15 +- .../core/src/utils/postprocess/python_rt.rs | 80 ++--- .../core/src/utils/postprocess/wasm_rt.rs | 36 ++- typegraph/core/wit/typegraph.wit | 6 +- typegraph/node/sdk/src/envs/cli.ts | 6 + typegraph/node/sdk/src/metagen.ts | 2 +- typegraph/node/sdk/src/tg_manage.ts | 10 +- typegraph/python/typegraph/envs/cli.py | 4 + typegraph/python/typegraph/graph/metagen.py | 6 +- 33 files changed, 605 insertions(+), 720 deletions(-) create mode 100644 typegraph/core/src/logger.rs create mode 100644 typegraph/core/src/utils/archive.rs create mode 100644 typegraph/core/src/utils/artifacts.rs create mode 100644 typegraph/core/src/utils/fs.rs delete mode 100644 typegraph/core/src/utils/fs_host.rs create mode 100644 typegraph/core/src/utils/pathlib.rs diff --git a/libs/common/src/typegraph/runtimes/deno.rs b/libs/common/src/typegraph/runtimes/deno.rs index c4180d2de7..e04eaf34d3 100644 --- a/libs/common/src/typegraph/runtimes/deno.rs +++ b/libs/common/src/typegraph/runtimes/deno.rs @@ -4,6 +4,7 @@ use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use serde_json::Value; +use std::path::PathBuf; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct FunctionMatData { @@ -13,9 +14,8 @@ pub struct FunctionMatData { #[derive(Serialize, Deserialize, Clone, Debug)] #[serde(rename_all = "camelCase")] pub struct ModuleMatData { - pub deno_artifact: IndexMap, - pub deps: Vec, - pub deps_meta: Option>>, + pub entry_point: PathBuf, + pub deps: Vec, } #[derive(Serialize, Deserialize, Clone, Debug)] diff --git a/libs/common/src/typegraph/runtimes/python.rs b/libs/common/src/typegraph/runtimes/python.rs index a601b6a88f..01c3f7156b 100644 --- a/libs/common/src/typegraph/runtimes/python.rs +++ b/libs/common/src/typegraph/runtimes/python.rs @@ -1,16 +1,15 @@ // Copyright Metatype OÜ, licensed under the Elastic License 2.0. 
// SPDX-License-Identifier: Elastic-2.0 -use indexmap::IndexMap; +use std::path::PathBuf; + use serde::{Deserialize, Serialize}; -use serde_json::Value; #[derive(Serialize, Deserialize, Clone, Debug)] #[serde(rename_all = "camelCase")] pub struct ModuleMatData { - pub python_artifact: IndexMap, - pub deps: Vec, - pub deps_meta: Option>>, + pub entry_point: PathBuf, + pub deps: Vec, } #[derive(Serialize, Deserialize, Clone, Debug)] diff --git a/meta-cli/src/cli/gen.rs b/meta-cli/src/cli/gen.rs index 6afe6ef103..877de3795b 100644 --- a/meta-cli/src/cli/gen.rs +++ b/meta-cli/src/cli/gen.rs @@ -7,7 +7,7 @@ use crate::config::PathOption; use crate::deploy::actors::task::serialize::{SerializeAction, SerializeActionGenerator}; use crate::deploy::actors::task_manager::{TaskManagerInit, TaskSource}; use crate::interlude::*; -use crate::{com::store::ServerStore, config::Config, deploy::actors::console::ConsoleActor}; +use crate::{config::Config, deploy::actors::console::ConsoleActor}; use actix::Actor; use clap::Parser; use common::typegraph::Typegraph; @@ -137,13 +137,6 @@ async fn load_tg_at( path: PathBuf, name: Option<&str>, ) -> anyhow::Result> { - ServerStore::with( - Some(crate::com::store::Command::Serialize), - Some(config.as_ref().clone()), - ); - ServerStore::set_artifact_resolution_flag(false); - // ServerStore::set_prefix(self.prefix.to_owned()); - let console = ConsoleActor::new(Arc::clone(&config)).start(); let config_dir: Arc = config.dir().unwrap_or_log().into(); @@ -155,6 +148,7 @@ async fn load_tg_at( config .prisma_migrations_base_dir(PathOption::Absolute) .into(), + false, ), console, TaskSource::Static(vec![path.clone()]), diff --git a/meta-cli/src/cli/serialize.rs b/meta-cli/src/cli/serialize.rs index c1361dcf2a..462f965f2a 100644 --- a/meta-cli/src/cli/serialize.rs +++ b/meta-cli/src/cli/serialize.rs @@ -66,6 +66,7 @@ impl Action for Serialize { config .prisma_migrations_base_dir(PathOption::Absolute) .into(), + true, ); if self.files.is_empty() { diff --git a/meta-cli/src/deploy/actors/task/action.rs b/meta-cli/src/deploy/actors/task/action.rs index abfd7c8d23..24fb41f74c 100644 --- a/meta-cli/src/deploy/actors/task/action.rs +++ b/meta-cli/src/deploy/actors/task/action.rs @@ -16,6 +16,7 @@ pub struct SharedActionConfig { pub working_dir: Arc, pub migrations_dir: Arc, pub default_migration_action: MigrationAction, + pub artifact_resolution: bool, } pub trait TaskActionGenerator: Clone { diff --git a/meta-cli/src/deploy/actors/task/command.rs b/meta-cli/src/deploy/actors/task/command.rs index b75794dc5c..d4ca079eff 100644 --- a/meta-cli/src/deploy/actors/task/command.rs +++ b/meta-cli/src/deploy/actors/task/command.rs @@ -101,6 +101,10 @@ impl CommandContext { "MCLI_MIGRATIONS_DIR", shared_config.migrations_dir.display().to_string(), ) + .env( + "MCLI_ARTIFACT_RESOLUTION", + shared_config.artifact_resolution.to_string(), + ) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()); diff --git a/meta-cli/src/deploy/actors/task/deploy.rs b/meta-cli/src/deploy/actors/task/deploy.rs index 5309437d1b..bfcda7170b 100644 --- a/meta-cli/src/deploy/actors/task/deploy.rs +++ b/meta-cli/src/deploy/actors/task/deploy.rs @@ -72,6 +72,7 @@ impl DeployActionGenerator { create: create_migrations, reset: destructive_migrations, }, + artifact_resolution: true, } .into(), } diff --git a/meta-cli/src/deploy/actors/task/serialize.rs b/meta-cli/src/deploy/actors/task/serialize.rs index f9275039b2..fefa118033 100644 --- a/meta-cli/src/deploy/actors/task/serialize.rs +++ 
b/meta-cli/src/deploy/actors/task/serialize.rs @@ -31,7 +31,12 @@ pub struct SerializeActionGenerator { } impl SerializeActionGenerator { - pub fn new(config_dir: Arc, working_dir: Arc, migrations_dir: Arc) -> Self { + pub fn new( + config_dir: Arc, + working_dir: Arc, + migrations_dir: Arc, + artifact_resolution: bool, + ) -> Self { Self { shared_config: SharedActionConfig { command: "serialize", @@ -43,6 +48,7 @@ impl SerializeActionGenerator { create: false, reset: false, }, + artifact_resolution, } .into(), } diff --git a/typegate/src/runtimes/deno/deno.ts b/typegate/src/runtimes/deno/deno.ts index cd312e849e..41e352db31 100644 --- a/typegate/src/runtimes/deno/deno.ts +++ b/typegate/src/runtimes/deno/deno.ts @@ -5,7 +5,7 @@ import { ComputeStage } from "../../engine/query_engine.ts"; import { TypeGraphDS, TypeMaterializer } from "../../typegraph/mod.ts"; import { Runtime } from "../Runtime.ts"; import { Resolver, RuntimeInitParams } from "../../types.ts"; -import { Artifact, DenoRuntimeData } from "../../typegraph/types.ts"; +import { DenoRuntimeData } from "../../typegraph/types.ts"; import * as ast from "graphql/ast"; import { InternalAuth } from "../../services/auth/protocols/internal.ts"; import { DenoMessenger } from "./deno_messenger.ts"; @@ -43,6 +43,7 @@ export class DenoRuntime extends Runtime { secretManager, typegate, } = params as RuntimeInitParams; + const artifacts = tg.meta.artifacts; const { worker: name } = args as unknown as DenoRuntimeData; if (name == null) { @@ -63,10 +64,7 @@ export class DenoRuntime extends Runtime { const ops = new Map(); const uuid = crypto.randomUUID(); - const basePath = path.join( - typegate.tmpDir, - "artifacts", - ); + const basePath = path.join(typegate.tmpDir, "artifacts"); let registryCount = 0; for (const mat of materializers) { @@ -82,17 +80,17 @@ export class DenoRuntime extends Runtime { registryCount += 1; } else if (mat.name === "module") { const matData = mat.data; - const denoArtifact = matData.denoArtifact as Artifact; - const depArtifacts = matData.depsMeta as Artifact[]; + const entryPoint = artifacts[matData.entryPoint as string]; + const deps = (matData.deps as string[]).map((dep) => artifacts[dep]); const moduleMeta = { typegraphName: typegraphName, - relativePath: denoArtifact.path, - hash: denoArtifact.hash, - sizeInBytes: denoArtifact.size, + relativePath: entryPoint.path, + hash: entryPoint.hash, + sizeInBytes: entryPoint.size, }; - const depMetas = depArtifacts.map((dep) => { + const depMetas = deps.map((dep) => { return { typegraphName: typegraphName, relativePath: dep.path, @@ -122,7 +120,7 @@ export class DenoRuntime extends Runtime { }); // TODO: can a single aritfact be used by multiple materializers? 
- registry.set(denoArtifact.hash, registryCount); + registry.set(entryPoint.hash, registryCount); registryCount += 1; } } @@ -222,8 +220,9 @@ export class DenoRuntime extends Runtime { if (mat.name === "import_function") { const modMat = this.tg.materializers[mat.data.mod as number]; - const denoAritfact = modMat.data.denoArtifact as Artifact; - const op = this.registry.get(denoAritfact.hash)!; + const entryPoint = + this.tg.meta.artifacts[modMat.data.entryPoint as string]; + const op = this.registry.get(entryPoint.hash)!; return async ({ _: { diff --git a/typegate/src/runtimes/python.ts b/typegate/src/runtimes/python.ts index 8bb3ce1cd1..6219fe20e9 100644 --- a/typegate/src/runtimes/python.ts +++ b/typegate/src/runtimes/python.ts @@ -6,7 +6,7 @@ import { getLogger, Logger } from "../log.ts"; import { Runtime } from "./Runtime.ts"; import type { Resolver, RuntimeInitParams } from "../types.ts"; import { ComputeStage } from "../engine/query_engine.ts"; -import { Artifact, Materializer } from "../typegraph/types.ts"; +import { Materializer } from "../typegraph/types.ts"; import * as ast from "graphql/ast"; import { WitWireMessenger } from "./wit_wire/mod.ts"; import { WitWireMatInfo } from "../../engine/runtime.js"; @@ -32,98 +32,93 @@ export class PythonRuntime extends Runtime { logger.info("initializing PythonRuntime"); logger.debug("init params: " + JSON.stringify(params)); const { materializers, typegraphName, typegraph, typegate } = params; + const artifacts = typegraph.meta.artifacts; const wireMatInfos = await Promise.all( materializers .filter((mat) => mat.name != "pymodule") - .map( - async (mat) => { - let matInfoData: object; - switch (mat.name) { - case "lambda": - matInfoData = { - ty: "lambda", - effect: mat.effect, - source: mat.data.fn as string, - }; - break; - case "def": - matInfoData = { - ty: "def", - func_name: mat.data.name as string, - effect: mat.effect, - source: mat.data.fn as string, - }; - break; - case "import_function": { - const pyModMat = - typegraph.materializers[mat.data.mod as number]; - - // resolve the python module artifacts/files - const { pythonArtifact, depsMeta: depArtifacts } = - pyModMat.data; - - const deps = depArtifacts as Artifact[]; - const artifact = pythonArtifact as Artifact; - - const sources = Object.fromEntries( - await Promise.all( - [ - { + .map(async (mat) => { + let matInfoData: object; + switch (mat.name) { + case "lambda": + matInfoData = { + ty: "lambda", + effect: mat.effect, + source: mat.data.fn as string, + }; + break; + case "def": + matInfoData = { + ty: "def", + func_name: mat.data.name as string, + effect: mat.effect, + source: mat.data.fn as string, + }; + break; + case "import_function": { + const pyModMat = typegraph.materializers[mat.data.mod as number]; + + // resolve the python module artifacts/files + const entryPoint = artifacts[pyModMat.data.entryPoint as string]; + const deps = (pyModMat.data.deps as string[]).map( + (dep) => artifacts[dep], + ); + + const sources = Object.fromEntries( + await Promise.all( + [ + { + typegraphName: typegraphName, + relativePath: entryPoint.path, + hash: entryPoint.hash, + sizeInBytes: entryPoint.size, + }, + ...deps.map((dep) => { + return { typegraphName: typegraphName, - relativePath: artifact.path, - hash: artifact.hash, - sizeInBytes: artifact.size, - }, - ...deps.map((dep) => { - return { - typegraphName: typegraphName, - relativePath: dep.path, - hash: dep.hash, - sizeInBytes: dep.size, - }; - }), - ].map( - async (meta) => - [ - meta.relativePath, - await 
Deno.readTextFile( - await typegate.artifactStore.getLocalPath(meta), - ), - ] as const, - ), + relativePath: dep.path, + hash: dep.hash, + sizeInBytes: dep.size, + }; + }), + ].map( + async (meta) => + [ + meta.relativePath, + await Deno.readTextFile( + await typegate.artifactStore.getLocalPath(meta), + ), + ] as const, ), - ); - - matInfoData = { - ty: "import_function", - effect: mat.effect, - root_src_path: artifact.path, - func_name: mat.data.name as string, - sources, - }; - break; - } - default: - throw new Error(`unsupported materializer type: ${mat.name}`); + ), + ); + + matInfoData = { + ty: "import_function", + effect: mat.effect, + root_src_path: entryPoint.path, + func_name: mat.data.name as string, + sources, + }; + break; } - - // TODO: use materializer type node hash instead - const dataHash = await sha256(JSON.stringify(mat.data)); - const op_name = `${mat.data.name as string}_${ - dataHash.slice(0, 12) - }`; - - const out: WitWireMatInfo = { - op_name, - mat_hash: dataHash, - // TODO: source title of materializer type? - mat_title: mat.data.name as string, - mat_data_json: JSON.stringify(matInfoData), - }; - return out; - }, - ), + default: + throw new Error(`unsupported materializer type: ${mat.name}`); + } + + // TODO: use materializer type node hash instead + const dataHash = await sha256(JSON.stringify(mat.data)); + const op_name = `${mat.data.name as string}_${dataHash.slice(0, 12)}`; + + const out: WitWireMatInfo = { + op_name, + mat_hash: dataHash, + // TODO: source title of materializer type? + mat_title: mat.data.name as string, + mat_data_json: JSON.stringify(matInfoData), + }; + return out; + }), ); // add default vm for lambda/def @@ -161,43 +156,46 @@ export class PythonRuntime extends Runtime { _verbose: boolean, ): Promise { if (stage.props.node === "__typename") { - return [stage.withResolver(() => { - const { parent: parentStage } = stage.props; - if (parentStage != null) { - return parentStage.props.outType.title; - } - switch (stage.props.operationType) { - case ast.OperationTypeNode.QUERY: - return "Query"; - case ast.OperationTypeNode.MUTATION: - return "Mutation"; - default: - throw new Error( - `Unsupported operation type '${stage.props.operationType}'`, - ); - } - })]; + return [ + stage.withResolver(() => { + const { parent: parentStage } = stage.props; + if (parentStage != null) { + return parentStage.props.outType.title; + } + switch (stage.props.operationType) { + case ast.OperationTypeNode.QUERY: + return "Query"; + case ast.OperationTypeNode.MUTATION: + return "Mutation"; + default: + throw new Error( + `Unsupported operation type '${stage.props.operationType}'`, + ); + } + }), + ]; } if (stage.props.materializer != null) { const mat = stage.props.materializer; - return [ - stage.withResolver(await this.delegate(mat)), - ]; + return [stage.withResolver(await this.delegate(mat))]; } if (stage.props.outType.config?.__namespace) { return [stage.withResolver(() => ({}))]; } - return [stage.withResolver(({ _: { parent } }) => { - if (stage.props.parent == null) { // namespace - return {}; - } - const resolver = parent[stage.props.node]; - return typeof resolver === "function" ? resolver() : resolver; - })]; + return [ + stage.withResolver(({ _: { parent } }) => { + if (stage.props.parent == null) { + // namespace + return {}; + } + const resolver = parent[stage.props.node]; + return typeof resolver === "function" ? 
resolver() : resolver; + }), + ]; } async delegate(mat: Materializer): Promise { diff --git a/typegate/src/typegate/artifacts/mod.ts b/typegate/src/typegate/artifacts/mod.ts index 0c9963ebdc..b896b1ec6c 100644 --- a/typegate/src/typegate/artifacts/mod.ts +++ b/typegate/src/typegate/artifacts/mod.ts @@ -120,12 +120,7 @@ export class ArtifactStore implements AsyncDisposable { stack.use(uploadEndpoints); stack.use(refCounter); return await Promise.resolve( - new ArtifactStore( - persistence, - uploadEndpoints, - refCounter, - stack.move(), - ), + new ArtifactStore(persistence, uploadEndpoints, refCounter, stack.move()), ); } @@ -134,8 +129,7 @@ export class ArtifactStore implements AsyncDisposable { private uploadEndpoints: UploadEndpointManager, private refCounter: RefCounter, private disposables: AsyncDisposableStack, - ) { - } + ) {} async [Symbol.asyncDispose]() { if (this.#disposed) return; diff --git a/typegraph/core/src/conversion/runtimes.rs b/typegraph/core/src/conversion/runtimes.rs index 869b3a9059..dd8772a66a 100644 --- a/typegraph/core/src/conversion/runtimes.rs +++ b/typegraph/core/src/conversion/runtimes.rs @@ -102,11 +102,8 @@ impl MaterializerConverter for DenoMaterializer { } Module(module) => { let data = serde_json::from_value(json!({ - "denoArtifact": json!({ - "path": module.file, - }), + "entryPoint": module.file, "deps": module.deps, - "depsMeta": None::, })) .unwrap(); ("module".to_string(), data) @@ -234,11 +231,8 @@ impl MaterializerConverter for PythonMaterializer { } Module(module) => { let data = serde_json::from_value(json!({ - "pythonArtifact":json!({ - "path": module.file - }), + "entryPoint": module.file, "deps": module.deps, - "depsMeta": None::, })) .map_err(|e| e.to_string())?; diff --git a/typegraph/core/src/global_store.rs b/typegraph/core/src/global_store.rs index 416ac8c631..05ae5f27b7 100644 --- a/typegraph/core/src/global_store.rs +++ b/typegraph/core/src/global_store.rs @@ -14,7 +14,6 @@ use crate::wit::utils::Auth as WitAuth; use crate::wit::runtimes::{Effect, MaterializerDenoPredefined, MaterializerId}; use graphql_parser::parse_query; use indexmap::IndexMap; -use std::path::PathBuf; use std::rc::Rc; use std::{cell::RefCell, collections::HashMap}; @@ -59,11 +58,9 @@ pub struct Store { graphql_endpoints: Vec, auths: Vec, - deploy_cwd_dir: Option, random_seed: Option, latest_alias_no: u32, - codegen_flag: Option, } impl Store { @@ -219,16 +216,6 @@ impl Store { }) } - pub fn set_deploy_cwd(value: PathBuf) { - with_store_mut(|s| { - s.deploy_cwd_dir = Some(value); - }) - } - - pub fn get_deploy_cwd() -> Option { - with_store(|s| s.deploy_cwd_dir.clone()) - } - pub fn get_random_seed() -> Option { with_store(|store| store.random_seed) } @@ -509,16 +496,6 @@ impl Store { pub fn get_auths() -> Vec { with_store(|s| s.auths.clone()) } - - pub fn set_codegen_flag(status: Option) { - with_store_mut(|s| { - s.codegen_flag = status; - }) - } - - pub fn get_codegen_flag() -> bool { - with_store(|s| s.codegen_flag.unwrap_or(false)) - } } /// Generate a pub fn for asserting/unwrapping a Type as a specific TypeDef variant diff --git a/typegraph/core/src/lib.rs b/typegraph/core/src/lib.rs index d42d1ed06b..e5b8460645 100644 --- a/typegraph/core/src/lib.rs +++ b/typegraph/core/src/lib.rs @@ -4,6 +4,7 @@ mod conversion; mod errors; mod global_store; +mod logger; mod params; mod runtimes; mod t; diff --git a/typegraph/core/src/logger.rs b/typegraph/core/src/logger.rs new file mode 100644 index 0000000000..6f7a397a20 --- /dev/null +++ b/typegraph/core/src/logger.rs @@ 
-0,0 +1,63 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +#[allow(unused)] +macro_rules! debug { + ( $($arg:tt)* ) => { + { + use std::fmt::Write as _; + + let mut msg = "debug: ".to_string(); + write!(&mut msg, $($arg)*).unwrap(); + $crate::wit::metatype::typegraph::host::print(&msg); + } + }; +} + +#[allow(unused)] +macro_rules! info { + ( $($arg:tt)* ) => { + { + use std::fmt::Write as _; + + let mut msg = "info: ".to_string(); + write!(&mut msg, $($arg)*).unwrap(); + $crate::wit::metatype::typegraph::host::print(&msg); + } + }; +} + +#[allow(unused)] +macro_rules! warning { + ( $($arg:tt)* ) => { + { + use std::fmt::Write as _; + + let mut msg = "warn: ".to_string(); + write!(&mut msg, $($arg)*).unwrap(); + $crate::wit::metatype::typegraph::host::print(&msg); + } + }; +} + +#[allow(unused)] +macro_rules! error { + ( $($arg:tt)* ) => { + { + use std::fmt::Write as _; + + let mut msg = "error: ".to_string(); + write!(&mut msg, $($arg)*).unwrap(); + $crate::wit::metatype::typegraph::host::print(&msg); + } + }; +} + +#[allow(unused)] +pub(crate) use debug; +#[allow(unused)] +pub(crate) use error; +#[allow(unused)] +pub(crate) use info; +#[allow(unused)] +pub(crate) use warning; diff --git a/typegraph/core/src/typegraph.rs b/typegraph/core/src/typegraph.rs index 582af944f0..98bc87f77c 100644 --- a/typegraph/core/src/typegraph.rs +++ b/typegraph/core/src/typegraph.rs @@ -23,7 +23,6 @@ use std::cell::RefCell; use std::collections::hash_map::Entry; use std::collections::HashMap; use std::hash::Hasher as _; - use std::rc::Rc; use crate::wit::core::{ @@ -248,7 +247,9 @@ pub fn serialize(params: SerializeParams) -> Result<(String, Vec)> let result = match result.map_err(|e| e.to_string().into()) { Ok(res) => res, - Err(e) => return Err(e), + Err(e) => { + return Err(e); + } }; Ok((result, artifacts)) diff --git a/typegraph/core/src/utils/archive.rs b/typegraph/core/src/utils/archive.rs new file mode 100644 index 0000000000..29d999b170 --- /dev/null +++ b/typegraph/core/src/utils/archive.rs @@ -0,0 +1,72 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use common::archive::{ + archive_entries_from_bytes, encode_bytes_to_base_64, tarb64_unpack_entries_as_map, +}; +use indexmap::IndexMap; + +use super::fs::FsContext; +use std::path::Path; + +pub trait ArchiveExt { + fn compress_and_encode(&self, path: &Path) -> Result; + fn unpack_base64(&self, tarb64: &str, dest: &Path) -> Result<(), String>; +} + +impl FsContext { + fn load_tg_ignore(&self, file: &Path) -> Result, String> { + if self.exists(file)? 
{ + let content = self.read_text_file(file)?; + + Ok(content + .lines() + .filter_map(|line| { + let trimmed = line.trim(); + if trimmed.is_empty() || trimmed.starts_with('#') { + None + } else { + Some(line.to_string()) + } + }) + .collect::>()) + } else { + Ok(vec![]) + } + } +} + +impl ArchiveExt for FsContext { + fn compress_and_encode(&self, path: &Path) -> Result { + let ignore = { + let tg_ignore_path = Path::new(".tgignore"); + let mut ignore = self.load_tg_ignore(tg_ignore_path)?; + ignore.extend(["node_modules".to_string(), ".git".to_string()]); + ignore + }; + + let paths = self.expand_path(path, &ignore)?; + // TODO do not load everything in memory + let entries = paths + .iter() + .map(|p| { + self.read_file(p) + .map(|content| (p.to_string_lossy().into(), content)) + }) + .collect::, _>>()?; + + let bytes = archive_entries_from_bytes(entries).map_err(|e| e.to_string())?; + encode_bytes_to_base_64(bytes).map_err(|e| e.to_string()) + } + + fn unpack_base64(&self, tarb64: &str, dest: &Path) -> Result<(), String> { + // TODO iterator instead of loading everything in memory + let contents = tarb64_unpack_entries_as_map(Some(tarb64)).map_err(|e| e.to_string())?; + + for (path, bytes) in contents { + self.write_file(&dest.join(path), &bytes)?; + } + + Ok(()) + } +} diff --git a/typegraph/core/src/utils/artifacts.rs b/typegraph/core/src/utils/artifacts.rs new file mode 100644 index 0000000000..7a0361a97f --- /dev/null +++ b/typegraph/core/src/utils/artifacts.rs @@ -0,0 +1,26 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use std::path::PathBuf; + +use super::fs::FsContext; +use common::typegraph::{runtimes::Artifact, Typegraph}; + +pub trait ArtifactsExt { + /// update the artifact meta, and register the artifact in the typegraph + fn register_artifact(&self, artifact_path: PathBuf, tg: &mut Typegraph) -> Result<(), String>; +} + +impl ArtifactsExt for FsContext { + fn register_artifact(&self, path: PathBuf, tg: &mut Typegraph) -> Result<(), String> { + use std::collections::btree_map::Entry; + if let Entry::Vacant(entry) = tg.meta.artifacts.entry(path) { + let path = entry.key().to_path_buf(); + let (hash, size) = self.hash_file(&path)?; + tg.deps.push(path.clone()); + entry.insert(Artifact { hash, size, path }); + } + + Ok(()) + } +} diff --git a/typegraph/core/src/utils/fs.rs b/typegraph/core/src/utils/fs.rs new file mode 100644 index 0000000000..e785883789 --- /dev/null +++ b/typegraph/core/src/utils/fs.rs @@ -0,0 +1,147 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
+// SPDX-License-Identifier: MPL-2.0
+
+use super::pathlib::PathLib;
+use crate::wit::metatype::typegraph::host::{
+    expand_path as expand_path_host, path_exists as path_exists_host, read_file as read_file_host,
+    write_file as write_file_host,
+};
+use sha2::{Digest, Sha256};
+use std::{
+    collections::BTreeSet,
+    path::{Path, PathBuf},
+};
+
+pub struct FsContext {
+    pathlib: PathLib,
+}
+
+impl FsContext {
+    pub fn new(base_dir: PathBuf) -> Self {
+        Self {
+            pathlib: PathLib::new(base_dir),
+        }
+    }
+
+    pub fn exists(&self, path: &Path) -> Result<bool, String> {
+        path_exists_host(&self.pathlib.get_base_dir().join(path).to_string_lossy())
+    }
+
+    pub fn expand_path(
+        &self,
+        path: &Path,
+        exclude_globs: &[String],
+    ) -> Result<Vec<PathBuf>, String> {
+        let exclude_as_regex = exclude_globs
+            .iter()
+            .map(|glob_pattern| {
+                let mut regex_pattern = String::new();
+                for c in glob_pattern.chars() {
+                    match c {
+                        '*' => regex_pattern.push_str(".*"),
+                        '?' => regex_pattern.push('.'),
+                        _ => {
+                            if ".()+-[]^$|".contains(c) {
+                                // escape native regex
+                                regex_pattern.push('\\');
+                            }
+                            regex_pattern.push(c);
+                        }
+                    }
+                }
+                // test as suffix if glob star is present
+                if glob_pattern.contains('*') {
+                    regex_pattern.push('$');
+                }
+                regex_pattern
+            })
+            .collect::<Vec<String>>();
+
+        expand_path_host(
+            &self.pathlib.get_base_dir().join(path).to_string_lossy(),
+            &exclude_as_regex,
+        )?
+        .iter()
+        .map(|p| self.pathlib.relative(Path::new(p)))
+        .collect::<Result<Vec<_>, _>>()
+    }
+
+    fn extract_glob_dirname(path: &str) -> PathBuf {
+        let path = PathBuf::from(path);
+        let dirs: Vec<_> = path.components().map(|comp| comp.as_os_str()).collect();
+        let mut parent_dir = PathBuf::new();
+        let special_chars = &['*', '?', '[', ']'];
+
+        for dir in dirs {
+            let dir = dir.to_str().unwrap();
+            if dir.find(special_chars).is_some() {
+                break;
+            }
+            parent_dir = parent_dir.join(dir);
+        }
+
+        parent_dir
+    }
+
+    pub fn expand_glob(&self, path: &Path) -> Result<Vec<PathBuf>, String> {
+        let path_str = path.to_string_lossy();
+        let parent_dir = Self::extract_glob_dirname(&path_str);
+        let all_files = self.expand_path(&parent_dir, &[])?;
+
+        let glob_pattern = glob::Pattern::new(&path_str).unwrap();
+
+        Ok(all_files
+            .into_iter()
+            .filter(|p| glob_pattern.matches(&p.to_string_lossy()))
+            .collect::<Vec<_>>())
+    }
+
+    fn is_glob(path: &str) -> bool {
+        // dir can also contain wild cards,
+        path.contains('*') || path.contains('?')
+    }
+
+    pub fn list_files(&self, glob_or_dirs: &[String]) -> Vec<PathBuf> {
+        glob_or_dirs
+            .iter()
+            .flat_map(|dep| {
+                if Self::is_glob(dep) {
+                    self.expand_glob(Path::new(dep)).into_iter().flatten()
+                } else {
+                    self.expand_path(Path::new(dep), &[]).into_iter().flatten()
+                }
+            })
+            .collect::<BTreeSet<_>>()
+            .into_iter()
+            .collect()
+    }
+
+    pub fn read_file(&self, path: &Path) -> Result<Vec<u8>, String> {
+        read_file_host(&self.pathlib.get_base_dir().join(path).to_string_lossy())
+    }
+
+    pub fn read_text_file(&self, path: &Path) -> Result<String, String> {
+        self.read_file(path)
+            .and_then(|bytes| String::from_utf8(bytes).map_err(|e| e.to_string()))
+    }
+
+    pub fn write_file(&self, path: &Path, bytes: &[u8]) -> Result<(), String> {
+        write_file_host(
+            &self.pathlib.get_base_dir().join(path).to_string_lossy(),
+            bytes,
+        )
+    }
+
+    pub fn write_text_file(&self, path: &Path, text: String) -> Result<(), String> {
+        self.write_file(path, text.as_bytes())
+    }
+
+    // TODO limited buffer?
+ pub fn hash_file(&self, path: &Path) -> Result<(String, u32), String> { + let mut sha256 = Sha256::new(); + let bytes = self.read_file(path)?; + let size = bytes.len() as u32; + sha256.update(bytes); + Ok((format!("{:x}", sha256.finalize()), size)) + } +} diff --git a/typegraph/core/src/utils/fs_host.rs b/typegraph/core/src/utils/fs_host.rs deleted file mode 100644 index b3d03f79f2..0000000000 --- a/typegraph/core/src/utils/fs_host.rs +++ /dev/null @@ -1,296 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 - -use std::{ - collections::BTreeSet, - path::{Path, PathBuf}, -}; - -use glob::Pattern as GlobPattern; - -use crate::{ - global_store::Store, - wit::metatype::typegraph::host::{ - eprint, expand_path as expand_path_host, get_cwd, path_exists as path_exists_host, - read_file, write_file, - }, -}; -use common::archive::{ - archive_entries_from_bytes, encode_bytes_to_base_64, tarb64_unpack_entries_as_map, -}; -use indexmap::IndexMap; -use sha2::{Digest, Sha256}; - -pub fn read_text_file(path: &Path) -> Result { - read_file(&path.display().to_string()).and_then(|bytes| { - let s = std::str::from_utf8(&bytes).map_err(|e| e.to_string())?; - Ok(s.to_owned()) - }) -} - -pub fn write_text_file(path: &Path, text: String) -> Result<(), String> { - write_file(&path.display().to_string(), text.as_bytes()) -} - -pub fn common_prefix_paths(paths: &[PathBuf]) -> Option { - if paths.len() <= 1 { - return None; - } - - // always >= 1 - let mut prefix = paths[0].clone(); - for path in paths.iter().skip(1) { - prefix = prefix - .components() - .zip(path.components()) - .take_while(|&(a, b)| a == b) - .map(|(a, _)| a) - .collect::<_>(); - } - - // [/]a/b => if path is absolute, path_chunk[0] is an empty string - if prefix.components().count() == 0 { - return None; - } - - Some(prefix) -} - -pub fn relativize_paths(paths: &[PathBuf]) -> Result, String> { - if paths.is_empty() { - return Ok(vec![]); - } - - // ambiguous case, assume it is under cwd - if paths.len() == 1 { - let possible_base = cwd()?; - return paths[0] - .strip_prefix(&possible_base) - .map(|stripped| vec![stripped.to_owned()]) - .map_err(|_| format!("{:?} does not contain path", possible_base.display())); - } - - if let Some(common_dir) = common_prefix_paths(paths) { - let ret = paths - .iter() - .map(|path| { - path.strip_prefix(common_dir.clone()) - .map_err(|e| e.to_string()) - .map(|v| v.to_path_buf()) - }) - .collect::, String>>()?; - return Ok(ret); - } - - Err("Cannot relativize path list if one item is already relative".to_string()) -} - -pub fn expand_path(path: &Path, exclude_glob: &[String]) -> Result, String> { - let exclude_as_regex = exclude_glob - .iter() - .map(|glob_pattern| { - let mut regex_pattern = String::new(); - for c in glob_pattern.chars() { - match c { - '*' => regex_pattern.push_str(".*"), - '?' => regex_pattern.push('.'), - _ => { - if ".()+-[]^$|".contains(c) { - // escape native regex - regex_pattern.push('\\'); - } - regex_pattern.push(c); - } - } - } - // test as suffix if glob star is present - if glob_pattern.contains('*') { - regex_pattern.push('$'); - } - regex_pattern - }) - .collect::>(); - - let ret = expand_path_host(&path.display().to_string(), &exclude_as_regex)? 
- .iter() - .map(PathBuf::from) - .collect(); - Ok(ret) -} - -pub fn compress>(path: P, exclude: Option>) -> Result, String> { - // Note: each exclude entry is a regex pattern - let exclude = exclude.unwrap_or_default(); - let paths = expand_path(&PathBuf::from(path.into()), &exclude)?; - let mut entries = IndexMap::new(); - // eprint("Preparing tarball"); - - let abs_paths = paths.iter().map(PathBuf::from).collect::>(); - let rel_paths = relativize_paths(&abs_paths)?; - - for (i, abs_path) in abs_paths.iter().enumerate() { - let rel_path_str = rel_paths[i].to_string_lossy(); - // eprint(&format!(" ++ {}", rel_path_str.clone())); - // Note: tarball path should be relative - // Note: Strip against workdir does not work when the sdk is spawn from another process - entries.insert(rel_path_str.into(), read_file(&abs_path.to_string_lossy())?); - } - - archive_entries_from_bytes(entries).map_err(|e| e.to_string()) -} - -pub fn unpack_base64>(tarb64: &str, dest: P) -> Result<(), String> { - let dest = PathBuf::from(dest.into()); - let contents = tarb64_unpack_entries_as_map(Some(tarb64)).map_err(|e| e.to_string())?; - - for (path, bytes) in contents { - let dest_file = dest.join(path).display().to_string(); - write_file(&dest_file, &bytes)?; - } - - Ok(()) -} - -pub fn compress_and_encode_base64(path: PathBuf) -> Result { - let mut tgignore = load_tg_ignore_file()?; - let default = vec!["node_modules".to_string(), ".git".to_string()]; - tgignore.extend(default); - - let bytes = compress(path.display().to_string(), Some(tgignore))?; - encode_bytes_to_base_64(bytes).map_err(|e| e.to_string()) -} - -/// Search for .tgignore file at `cwd`, if nothing is found, an empty `Vec` is returned -pub fn load_tg_ignore_file() -> Result, String> { - let file = cwd()?.join(".tgignore"); - - match path_exists(&file)? { - true => read_text_file(&file).map(|content| { - content - .lines() - .filter_map(|line| { - let trimmed = line.trim(); - if trimmed.is_empty() || trimmed.starts_with('#') { - return None; - } - Some(line.to_owned()) - }) - .collect() - }), - false => Ok(vec![]), - } -} - -/// Returns `get_cwd()` by default, custom `dir` otherwise -pub fn cwd() -> Result { - match Store::get_deploy_cwd() { - Some(path) => Ok(path), - None => Ok(PathBuf::from(get_cwd()?)), - } -} - -/// Strip given path with `cwd` -#[allow(dead_code)] -pub fn make_relative(path: &Path) -> Result { - path.strip_prefix(cwd()?) - .map_err(|e| e.to_string()) - .map(|r| r.to_owned()) -} - -/// Join given path with `cwd` -pub fn make_absolute(path: &Path) -> Result { - match path.is_relative() { - true => Ok(cwd()?.join(path)), - false => Ok(path.to_owned()), - } -} - -// TODO: use smaller buffer? 
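
[Review note] Both the new `load_tg_ignore` and the removed `load_tg_ignore_file` apply the usual ignore-file convention: blank lines and `#` comments are dropped, every other line is kept verbatim. A small self-contained sketch of that filter (function name and sample input are illustrative only):

// Keep only non-empty, non-comment lines of a .tgignore-style file.
fn parse_tg_ignore(content: &str) -> Vec<String> {
    content
        .lines()
        .filter(|line| {
            let trimmed = line.trim();
            !trimmed.is_empty() && !trimmed.starts_with('#')
        })
        .map(|line| line.to_string())
        .collect()
}

fn main() {
    let ignore = parse_tg_ignore("# build artifacts\n\nnode_modules\n.git\n");
    assert_eq!(ignore, vec!["node_modules".to_string(), ".git".to_string()]);
}
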
-pub fn hash_file(path: &Path) -> Result<(String, u32), String> { - let mut sha256 = Sha256::new(); - let bytes = read_file(&path.to_string_lossy())?; - let size = bytes.len() as u32; - sha256.update(bytes); - Ok((format!("{:x}", sha256.finalize()), size)) -} - -pub fn path_exists(path: &Path) -> Result { - path_exists_host(&path.to_string_lossy()) -} - -pub fn is_glob(path: &str) -> bool { - // dir can also contain wild cards, - path.contains('*') || path.contains('?') -} - -pub fn extract_glob_dirname(path: &str) -> PathBuf { - let path = PathBuf::from(path); - let dirs: Vec<_> = path.components().map(|comp| comp.as_os_str()).collect(); - let mut parent_dir = PathBuf::new(); - let special_chars = &['*', '?', '[', ']']; - - for dir in dirs { - let dir = dir.to_str().unwrap(); - if dir.find(special_chars).is_some() { - break; - } - parent_dir = parent_dir.join(dir); - } - - parent_dir -} - -pub fn expand_glob(path: &str) -> Result, String> { - let abs_path = make_absolute(&PathBuf::from(path))? - .to_string_lossy() - .to_string(); - - let parent_dir = extract_glob_dirname(&abs_path); - let all_files = expand_path(&parent_dir, &[])?; - - let glob_pattern = GlobPattern::new(&abs_path).unwrap(); - - let mut matching_files = vec![]; - for file in all_files { - if glob_pattern.matches(file.to_str().unwrap()) { - matching_files.push(file); - } - } - - Ok(matching_files) -} - -pub fn resolve_globs_dirs(deps: Vec) -> Result, String> { - let mut resolved_deps = BTreeSet::new(); - for dep in deps { - if is_glob(&dep) { - let abs_path = make_absolute(&PathBuf::from(dep))? - .to_string_lossy() - .to_string(); - - let matching_files = expand_glob(&abs_path).map_err(|err| { - eprint(&format!("Error resolving globs: {:?}", err)); - err - })?; - for file in matching_files { - let rel_path = make_relative(&file)?; - resolved_deps.insert(rel_path); - } - } else { - let all_files = - expand_path(&make_absolute(&PathBuf::from(dep))?, &[]).map_err(|err| { - eprint(&format!( - "Error resolving dependencies and dependency directories: {:?}", - err - )); - err - })?; - for file in all_files { - let rel_path = make_relative(&file)?; - resolved_deps.insert(rel_path); - } - } - } - - Ok(resolved_deps.into_iter().collect()) -} diff --git a/typegraph/core/src/utils/mod.rs b/typegraph/core/src/utils/mod.rs index 652519eba7..7802839047 100644 --- a/typegraph/core/src/utils/mod.rs +++ b/typegraph/core/src/utils/mod.rs @@ -6,6 +6,7 @@ use std::path::PathBuf; use crate::utils::metagen_utils::RawTgResolver; use common::typegraph::{Auth, AuthProtocol}; +use fs::FsContext; use indexmap::IndexMap; use serde_json::json; @@ -19,9 +20,12 @@ use crate::wit::utils::{Auth as WitAuth, MdkConfig, MdkOutput, QueryDeployParams use crate::Lib; use std::path::Path; -pub mod fs_host; +mod archive; +mod artifacts; +mod fs; pub mod metagen_utils; mod oauth2; +mod pathlib; pub mod postprocess; pub mod reduce; @@ -256,21 +260,10 @@ impl crate::wit::utils::Guest for crate::Lib { Ok(req_body.to_string()) } - fn unpack_tarb64(tar_b64: String, dest: String) -> Result<()> { - fs_host::unpack_base64(&tar_b64, dest).map_err(|e| e.into()) - } - fn remove_injections(id: CoreTypeId) -> Result { remove_injections_recursive(id.into()).map(|id| id.into()) } - fn get_cwd() -> Result { - match fs_host::cwd() { - Ok(path) => Ok(path.display().to_string()), - Err(e) => Err(e), - } - } - fn metagen_exec(config: MdkConfig) -> Result, String> { let gen_config: metagen::Config = serde_json::from_str(&config.config_json) .map_err(|e| format!("Load metagen config: 
{}", e))?; @@ -297,13 +290,13 @@ impl crate::wit::utils::Guest for crate::Lib { .map_err(|e| format!("Generate target: {}", e)) } - fn metagen_write_files(items: Vec) -> Result<(), String> { + fn metagen_write_files(items: Vec, typegraph_dir: String) -> Result<(), String> { + let fs_ctx = FsContext::new(typegraph_dir.into()); for item in items { - let path = fs_host::make_absolute(Path::new(&item.path))?; - if fs_host::path_exists(&path)? && !item.overwrite { + if fs_ctx.exists(Path::new(&item.path))? && !item.overwrite { continue; } - fs_host::write_text_file(&path, item.content)?; + fs_ctx.write_text_file(Path::new(&item.path), item.content)?; } Ok(()) } diff --git a/typegraph/core/src/utils/pathlib.rs b/typegraph/core/src/utils/pathlib.rs new file mode 100644 index 0000000000..6ed76d1f7a --- /dev/null +++ b/typegraph/core/src/utils/pathlib.rs @@ -0,0 +1,24 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use std::path::{Path, PathBuf}; + +pub struct PathLib { + base_dir: PathBuf, +} + +impl PathLib { + pub fn new(base_dir: PathBuf) -> Self { + Self { base_dir } + } + + pub fn get_base_dir(&self) -> &PathBuf { + &self.base_dir + } + + pub fn relative(&self, path: &Path) -> Result { + path.strip_prefix(&self.base_dir) + .map_err(|e| e.to_string()) + .map(|r| r.to_path_buf()) + } +} diff --git a/typegraph/core/src/utils/postprocess/deno_rt.rs b/typegraph/core/src/utils/postprocess/deno_rt.rs index 250a42af3a..9189567bbd 100644 --- a/typegraph/core/src/utils/postprocess/deno_rt.rs +++ b/typegraph/core/src/utils/postprocess/deno_rt.rs @@ -1,130 +1,49 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::{ - global_store::Store, - utils::fs_host::{self, resolve_globs_dirs}, -}; -use common::typegraph::{ - runtimes::{deno::ModuleMatData, Artifact}, - utils::{map_from_object, object_from_map}, - Materializer, Typegraph, -}; +use common::typegraph::runtimes::deno::ModuleMatData; +use common::typegraph::utils::map_from_object; +use common::typegraph::{utils::object_from_map, Typegraph}; use std::path::PathBuf; -use crate::utils::postprocess::PostProcessor; +use crate::utils::{artifacts::ArtifactsExt, fs::FsContext, postprocess::PostProcessor}; -pub struct ResolveModuleOuput { - tg_artifacts: Vec, - tg_deps_paths: Vec, +pub struct DenoProcessor { + typegraph_dir: PathBuf, } -pub struct DenoProcessor; +impl DenoProcessor { + pub fn new(typegraph_dir: PathBuf) -> Self { + Self { typegraph_dir } + } +} impl PostProcessor for DenoProcessor { fn postprocess(self, tg: &mut Typegraph) -> Result<(), crate::errors::TgError> { - for mat in tg.materializers.iter_mut() { + let fs_ctx = FsContext::new(self.typegraph_dir); + let mut materializers = std::mem::take(&mut tg.materializers); + for mat in materializers.iter_mut() { if mat.name.as_str() == "module" { - match Self::resolve_module(mat)? 
{ - Some(ResolveModuleOuput { - tg_deps_paths: dep_paths, - tg_artifacts: artifacts, - }) => { - for i in 0..artifacts.len() { - let artifact = &artifacts[i]; - let dep_path = &dep_paths[i]; - tg.deps.push(dep_path.clone()); - tg.meta - .artifacts - .insert(artifact.path.clone(), artifact.clone()); - } - } - None => continue, - } - } - } - Ok(()) - } -} - -impl DenoProcessor { - pub fn resolve_module(mat: &mut Materializer) -> Result, String> { - let mut mat_data: ModuleMatData = - object_from_map(std::mem::take(&mut mat.data)).map_err(|e| e.to_string())?; - - let deno_module_path = mat_data - .deno_artifact - .get("path") - .unwrap() - .as_str() - .unwrap(); + let mat_data = std::mem::take(&mut mat.data); + let mut mat_data: ModuleMatData = + object_from_map(mat_data).map_err(|e| e.to_string())?; - let path = PathBuf::from(deno_module_path); + fs_ctx.register_artifact(mat_data.entry_point.clone(), tg)?; - // main_path can be either relative or absolute, - // if relative => make it absolute - // fs::canonicalize wouldn't work in this setup - let main_path = fs_host::make_absolute(&path)?; - - let mut tg_deps_paths = vec![]; - let mut tg_artifacts = vec![]; - - if fs_host::path_exists(&main_path)? { - let (module_hash, size) = fs_host::hash_file(&main_path.clone())?; - - let deno_artifact = Artifact { - hash: module_hash.clone(), - size, - path: path.clone(), - }; - tg_deps_paths.push(main_path); - - let deps = mat_data.deps.clone(); - - // resolve globs and dirs - let resolved_deps = resolve_globs_dirs(deps)?; - - for dep_rel_path in resolved_deps { - let dep_abs_path = fs_host::make_absolute(&dep_rel_path)?; + let deps = std::mem::take(&mut mat_data.deps); + for artifact in deps.into_iter() { + let artifacts = fs_ctx.list_files(&[artifact.to_string_lossy().to_string()]); + for artifact in artifacts.iter() { + fs_ctx.register_artifact(artifact.clone(), tg)?; + } + mat_data.deps.extend(artifacts); + } - let (dep_hash, dep_size) = fs_host::hash_file(&dep_abs_path)?; - let dep_artifact = Artifact { - path: dep_rel_path.clone(), - hash: dep_hash, - size: dep_size, - }; - tg_artifacts.push(dep_artifact); - tg_deps_paths.push(dep_abs_path); + mat.data = map_from_object(mat_data).map_err(|e| e.to_string())?; } - - // update post process results - mat_data.deno_artifact = map_from_object(Artifact { - hash: module_hash.clone(), - size, - path, - }) - .map_err(|e| e.to_string())?; - - mat_data.deps_meta = Some( - tg_artifacts - .iter() - .map(|dep| map_from_object(dep).map_err(|e| e.to_string())) - .collect::, _>>()?, - ); - tg_artifacts.push(deno_artifact); - } else if !Store::get_codegen_flag() { - return Err(format!( - "could not resolve module {:?}", - main_path.display(), - )); } - // else cli codegen - - mat.data = map_from_object(mat_data).map_err(|e| e.to_string())?; - Ok(Some(ResolveModuleOuput { - tg_artifacts, - tg_deps_paths, - })) + tg.materializers = materializers; + Ok(()) } } diff --git a/typegraph/core/src/utils/postprocess/mod.rs b/typegraph/core/src/utils/postprocess/mod.rs index 8a3699344b..a56ead8ea6 100644 --- a/typegraph/core/src/utils/postprocess/mod.rs +++ b/typegraph/core/src/utils/postprocess/mod.rs @@ -1,9 +1,9 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
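
[Review note] The rewritten Deno processor above moves the materializer list out of the typegraph with `std::mem::take`, mutates each entry while `tg` itself stays mutably borrowable for `register_artifact`, and then puts the list back. A minimal sketch of that take-and-restore pattern, with simplified stand-in types and a counter in place of artifact registration:

#[derive(Default)]
struct Tg {
    materializers: Vec<String>,
    registered: usize,
}

fn postprocess(tg: &mut Tg) {
    // Move the list out so iterating it does not hold a borrow on `tg`.
    let mut materializers = std::mem::take(&mut tg.materializers);
    for mat in materializers.iter_mut() {
        mat.push_str(" (processed)");
        tg.registered += 1; // the real code calls fs_ctx.register_artifact(..., tg) here
    }
    // Restore the (possibly mutated) list.
    tg.materializers = materializers;
}

fn main() {
    let mut tg = Tg {
        materializers: vec!["module".into()],
        ..Default::default()
    };
    postprocess(&mut tg);
    assert_eq!(tg.registered, 1);
    assert_eq!(tg.materializers[0], "module (processed)");
}
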
// SPDX-License-Identifier: MPL-2.0 -use crate::{global_store::Store, utils::fs_host, wit::core::SerializeParams}; +use crate::wit::core::SerializeParams; use common::typegraph::Typegraph; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; pub mod deno_rt; pub mod prisma_rt; @@ -40,35 +40,17 @@ impl PostProcessor for TypegraphPostProcessor { .parent() .unwrap() .to_owned(); - Store::set_deploy_cwd(typegraph_dir); // fs_host::cwd() will now use this value - Store::set_codegen_flag(Some(config.codegen)); - PrismaProcessor::new(config.prisma_migration).postprocess(tg)?; + PrismaProcessor::new(config.prisma_migration.clone()).postprocess(tg)?; - // Artifact resolution depends on the default cwd() (parent process) - // unless overwritten by `dir` through Store::set_deploy_cwd(..) (cli or custom dir with tgDeploy) let allow_fs_read_artifacts = config.artifact_resolution; if allow_fs_read_artifacts { - DenoProcessor.postprocess(tg)?; - PythonProcessor.postprocess(tg)?; - WasmProcessor.postprocess(tg)?; + DenoProcessor::new(typegraph_dir.clone()).postprocess(tg)?; + PythonProcessor::new(typegraph_dir.clone()).postprocess(tg)?; + WasmProcessor::new(typegraph_dir.clone()).postprocess(tg)?; } ValidationProcessor.postprocess(tg)?; Ok(()) } } - -#[allow(dead_code)] -pub fn compress_and_encode(main_path: &Path) -> Result { - if let Err(e) = fs_host::read_text_file(main_path) { - return Err(format!("Unable to read {:?}: {}", main_path.display(), e)); - } - - let enc_content = fs_host::compress_and_encode_base64(fs_host::cwd()?)?; - Ok(format!( - "file:{};base64:{}", - fs_host::make_relative(main_path)?.display(), - enc_content - )) -} diff --git a/typegraph/core/src/utils/postprocess/prisma_rt.rs b/typegraph/core/src/utils/postprocess/prisma_rt.rs index 907386386f..2b9cf38898 100644 --- a/typegraph/core/src/utils/postprocess/prisma_rt.rs +++ b/typegraph/core/src/utils/postprocess/prisma_rt.rs @@ -6,11 +6,11 @@ use common::typegraph::runtimes::prisma::MigrationOptions; use common::typegraph::runtimes::{KnownRuntime::Prisma, TGRuntime}; use common::typegraph::Typegraph; -use crate::utils::fs_host; +use crate::utils::archive::ArchiveExt; +use crate::utils::fs::FsContext; use crate::utils::postprocess::PostProcessor; use crate::wit::core::MigrationAction; use crate::wit::core::PrismaMigrationConfig; -use crate::wit::metatype::typegraph::host::path_exists; pub struct PrismaProcessor { config: PrismaMigrationConfig, @@ -31,7 +31,8 @@ impl PostProcessor for PrismaProcessor { impl PrismaProcessor { pub fn embed_prisma_migrations(&self, tg: &mut Typegraph) -> Result<(), String> { - let base_migration_path = PathBuf::from(&self.config.migrations_dir); + let base_migration_path: PathBuf = self.config.migrations_dir.clone().into(); + let fs_ctx = FsContext::new(base_migration_path.clone()); for rt in tg.runtimes.iter_mut() { if let TGRuntime::Known(Prisma(rt_data)) = rt { @@ -42,12 +43,8 @@ impl PrismaProcessor { rt_data.migration_options = Some(MigrationOptions { migration_files: { if action.apply { - let path = fs_host::make_absolute(&path)?; - match path_exists(&path.display().to_string())? { - true => { - let base64 = fs_host::compress_and_encode_base64(path)?; - Some(base64) - } + match fs_ctx.exists(&path)? 
{ + true => Some(fs_ctx.compress_and_encode(&path)?), false => None, } } else { diff --git a/typegraph/core/src/utils/postprocess/python_rt.rs b/typegraph/core/src/utils/postprocess/python_rt.rs index ffd56b0175..9f9e80dfbd 100644 --- a/typegraph/core/src/utils/postprocess/python_rt.rs +++ b/typegraph/core/src/utils/postprocess/python_rt.rs @@ -1,79 +1,51 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::utils::fs_host::{self, resolve_globs_dirs}; +use crate::utils::{artifacts::ArtifactsExt, fs::FsContext, postprocess::PostProcessor}; use common::typegraph::{ - runtimes::{python::ModuleMatData, Artifact}, + runtimes::python::ModuleMatData, utils::{map_from_object, object_from_map}, Typegraph, }; -use std::{collections::btree_map::Entry, path::PathBuf}; +use std::path::PathBuf; -use crate::utils::postprocess::PostProcessor; +pub struct PythonProcessor { + typegraph_dir: PathBuf, +} -pub struct PythonProcessor; +impl PythonProcessor { + pub fn new(typegraph_dir: PathBuf) -> Self { + Self { typegraph_dir } + } +} impl PostProcessor for PythonProcessor { fn postprocess(self, tg: &mut Typegraph) -> Result<(), crate::errors::TgError> { - for mat in tg.materializers.iter_mut() { + let fs_ctx = FsContext::new(self.typegraph_dir.clone()); + let mut materializers = std::mem::take(&mut tg.materializers); + + for mat in materializers.iter_mut() { if mat.name.as_str() == "pymodule" { + let mat_data = std::mem::take(&mut mat.data); let mut mat_data: ModuleMatData = - object_from_map(std::mem::take(&mut mat.data)).map_err(|e| e.to_string())?; - let path = mat_data.python_artifact.get("path").unwrap(); - let path: PathBuf = path.as_str().unwrap().into(); + object_from_map(mat_data).map_err(|e| e.to_string())?; - if let Entry::Vacant(entry) = tg.meta.artifacts.entry(path.clone()) { - let python_module_path = fs_host::make_absolute(&path)?; - - let (module_hash, size) = fs_host::hash_file(&python_module_path)?; - - tg.deps.push(python_module_path); - entry.insert(Artifact { - hash: module_hash.clone(), - size, - path: path.clone(), - }); - } + fs_ctx.register_artifact(mat_data.entry_point.clone(), tg)?; - let main_module = tg.meta.artifacts.get(&path).unwrap().clone(); - - let deps = mat_data.deps.clone(); - let mut dep_artifacts = vec![]; - let resolved_deps = resolve_globs_dirs(deps)?; - - for dep_rel_path in resolved_deps { - let dep_abs_path = fs_host::make_absolute(&dep_rel_path)?; - - let (dep_hash, dep_size) = fs_host::hash_file(&dep_abs_path)?; - let dep_artifact = Artifact { - path: dep_rel_path.clone(), - hash: dep_hash, - size: dep_size, - }; - dep_artifacts.push(dep_artifact.clone()); - if let Entry::Vacant(entry) = tg.meta.artifacts.entry(dep_rel_path.clone()) { - entry.insert(dep_artifact); - tg.deps.push(dep_abs_path); + let deps = std::mem::take(&mut mat_data.deps); + for artifact in deps.into_iter() { + let artifacts = fs_ctx.list_files(&[artifact.to_string_lossy().to_string()]); + for artifact in artifacts.iter() { + fs_ctx.register_artifact(artifact.clone(), tg)?; } + mat_data.deps.extend(artifacts); } - mat_data.python_artifact = map_from_object(Artifact { - hash: main_module.hash.clone(), - size: main_module.size, - path, - }) - .map_err(|e| e.to_string())?; - - mat_data.deps_meta = Some( - dep_artifacts - .iter() - .map(|dep| map_from_object(dep).map_err(|e| e.to_string())) - .collect::, _>>()?, - ); - mat.data = map_from_object(mat_data).map_err(|e| e.to_string())?; } } + + tg.materializers = 
materializers; Ok(()) } } diff --git a/typegraph/core/src/utils/postprocess/wasm_rt.rs b/typegraph/core/src/utils/postprocess/wasm_rt.rs index b1cbb20ca0..8d1b6c37e1 100644 --- a/typegraph/core/src/utils/postprocess/wasm_rt.rs +++ b/typegraph/core/src/utils/postprocess/wasm_rt.rs @@ -1,20 +1,31 @@ // Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. // SPDX-License-Identifier: MPL-2.0 -use crate::utils::fs_host; +use std::path::PathBuf; + +use crate::utils::{artifacts::ArtifactsExt, fs::FsContext}; use common::typegraph::{ - runtimes::{Artifact, KnownRuntime, TGRuntime}, + runtimes::{KnownRuntime, TGRuntime}, Typegraph, }; -use std::path::PathBuf; use crate::utils::postprocess::PostProcessor; -pub struct WasmProcessor; +pub struct WasmProcessor { + typegraph_dir: PathBuf, +} + +impl WasmProcessor { + pub fn new(typegraph_dir: PathBuf) -> Self { + Self { typegraph_dir } + } +} impl PostProcessor for WasmProcessor { fn postprocess(self, tg: &mut Typegraph) -> Result<(), crate::errors::TgError> { - for rt in &tg.runtimes { + let fs_ctx = FsContext::new(self.typegraph_dir); + let runtimes = std::mem::take(&mut tg.runtimes); + for rt in runtimes.iter() { let data = match rt { TGRuntime::Known(KnownRuntime::WasmReflected(data)) | TGRuntime::Known(KnownRuntime::WasmWire(data)) => data, @@ -22,20 +33,11 @@ impl PostProcessor for WasmProcessor { continue; } }; - let path = PathBuf::from(&data.wasm_artifact); - if tg.meta.artifacts.contains_key(&path) { - continue; - } - - let wasi_path = fs_host::make_absolute(&path)?; - let (hash, size) = fs_host::hash_file(&wasi_path)?; - - tg.deps.push(wasi_path.clone()); - tg.meta - .artifacts - .insert(path.clone(), Artifact { hash, size, path }); + fs_ctx.register_artifact(data.wasm_artifact.clone(), tg)?; } + + tg.runtimes = runtimes; Ok(()) } } diff --git a/typegraph/core/wit/typegraph.wit b/typegraph/core/wit/typegraph.wit index 43b29ea9b7..b8f2c17aca 100644 --- a/typegraph/core/wit/typegraph.wit +++ b/typegraph/core/wit/typegraph.wit @@ -576,12 +576,8 @@ interface utils { gql-deploy-query: func(params: query-deploy-params) -> result; gql-remove-query: func(tg-name: list) -> result; - unpack-tarb64: func(tar-b64: string, dest: string) -> result<_, error>; - remove-injections: func(type-id: type-id) -> result; - get-cwd: func() -> result; - record mdk-config { workspace-path: string, target-name: string, @@ -596,7 +592,7 @@ interface utils { } metagen-exec: func(config: mdk-config) -> result, string>; - metagen-write-files: func(items: list) -> result<_, string>; + metagen-write-files: func(items: list, typegraph-dir: string) -> result<_, string>; } interface host { diff --git a/typegraph/node/sdk/src/envs/cli.ts b/typegraph/node/sdk/src/envs/cli.ts index e2c3d7f9e8..cff279ee7d 100644 --- a/typegraph/node/sdk/src/envs/cli.ts +++ b/typegraph/node/sdk/src/envs/cli.ts @@ -9,6 +9,7 @@ const requiredCliEnvs = [ "config_dir", "working_dir", "migrations_dir", + "artifact_resolution", ] as const; const optionalCliEnvs = ["prefix"] as const; @@ -24,6 +25,7 @@ export interface CliEnv { config_dir: string; working_dir: string; migrations_dir: string; + artifact_resolution: boolean; prefix?: string; } @@ -61,6 +63,10 @@ export function loadCliEnv(): CliEnv | null { } break; + case "artifact_resolution": + record[key] = envValue === "true"; + break; + default: record[key] = envValue; break; diff --git a/typegraph/node/sdk/src/metagen.ts b/typegraph/node/sdk/src/metagen.ts index 52309cb3ad..357bb3aa0e 100644 --- 
a/typegraph/node/sdk/src/metagen.ts +++ b/typegraph/node/sdk/src/metagen.ts @@ -54,6 +54,6 @@ export class Metagen { run(tgOutput: TypegraphOutput, targetName: string, overwrite?: false) { const items = this.dryRun(tgOutput, targetName, overwrite); - wit_utils.metagenWriteFiles(items); + wit_utils.metagenWriteFiles(items, this.workspacePath); } } diff --git a/typegraph/node/sdk/src/tg_manage.ts b/typegraph/node/sdk/src/tg_manage.ts index 13e023d42d..6aa2fe5246 100644 --- a/typegraph/node/sdk/src/tg_manage.ts +++ b/typegraph/node/sdk/src/tg_manage.ts @@ -39,7 +39,7 @@ export class Manager { finalizationResult = this.#typegraph.serialize({ typegraphPath: env.typegraph_path, prefix: env.prefix, - artifactResolution: true, + artifactResolution: this.#env.artifact_resolution, codegen: false, prismaMigration: { migrationsDir: env.migrations_dir, @@ -65,6 +65,14 @@ export class Manager { const deployData = await rpc.getDeployData(this.#typegraph.name); const env = this.#env; + if (!env.artifact_resolution) { + log.failure({ + typegraph: this.#typegraph.name, + errors: ["artifact resolution must be enabled for deployment"], + }); + return; + } + const params: SerializeParams = { typegraphPath: env.typegraph_path, prefix: env.prefix, diff --git a/typegraph/python/typegraph/envs/cli.py b/typegraph/python/typegraph/envs/cli.py index 9bacaaf611..c44c2614a7 100644 --- a/typegraph/python/typegraph/envs/cli.py +++ b/typegraph/python/typegraph/envs/cli.py @@ -15,6 +15,7 @@ "config_dir", "working_dir", "migrations_dir", + "artifact_resolution", ) _optional_cli_envs = ("prefix",) @@ -35,6 +36,7 @@ class CliEnv: config_dir: str working_dir: str migrations_dir: str + artifact_resolution: bool @classmethod def load(cls) -> Optional["CliEnv"]: @@ -76,6 +78,8 @@ def load(cls) -> Optional["CliEnv"]: filter = raw_filter[len(prefix) :].split(",") d["filter"] = filter + d["artifact_resolution"] = d["artifact_resolution"] == "true" + return cls(**d) diff --git a/typegraph/python/typegraph/graph/metagen.py b/typegraph/python/typegraph/graph/metagen.py index da6df628f7..7571ea6ff7 100644 --- a/typegraph/python/typegraph/graph/metagen.py +++ b/typegraph/python/typegraph/graph/metagen.py @@ -15,9 +15,9 @@ from typegraph.wit import store, wit_utils from os import environ as env -_tg_path = env.get("MCLI_TG_PATH") +_tg_path = env.get("MCLI_TYPEGRAPH_PATH") if _tg_path is None: - raise Exception("MCLI_TG_PATH not set") + raise Exception("MCLI_TYPEGRAPH_PATH not set") serialize_params = SerializeParams( typegraph_path=_tg_path, @@ -78,6 +78,6 @@ def run( overwrite: Union[bool, None] = None, ): items = self.dry_run(tg_output, target_name, overwrite) - res = wit_utils.metagen_write_files(store, items) + res = wit_utils.metagen_write_files(store, items, self.workspace_path) if isinstance(res, Err): raise Exception(res.value) From f677bad4c262515dc4ecea320ee52e71fa8c3591 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Fri, 14 Jun 2024 03:23:28 +0300 Subject: [PATCH 25/35] fix website test --- Cargo.lock | 7 + examples/typegraphs/deno.py | 2 +- examples/typegraphs/faas-runner.ts | 45 ++-- examples/typegraphs/http-runtime.ts | 10 +- examples/typegraphs/metagen-py.py | 34 +++ examples/typegraphs/quick-start-project.py | 11 +- examples/typegraphs/quick-start-project.ts | 43 ++-- libs/common/src/archive.rs | 7 +- typegate/tests/e2e/website/website_test.ts | 226 +++++++++++------- typegraph/core/Cargo.toml | 3 +- typegraph/core/src/conversion/runtimes.rs | 15 +- typegraph/core/src/utils/archive.rs | 9 +- 
typegraph/node/sdk/src/tg_manage.ts | 11 +- typegraph/python/typegraph/graph/tg_manage.py | 8 +- typegraph/python/typegraph/runtimes/python.py | 2 + 15 files changed, 270 insertions(+), 163 deletions(-) create mode 100644 examples/typegraphs/metagen-py.py diff --git a/Cargo.lock b/Cargo.lock index 68b8f762e8..c9caeaa98b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12071,6 +12071,7 @@ dependencies = [ "serde 1.0.203", "serde_json", "sha2 0.10.8", + "unindent", "wit-bindgen 0.24.0", ] @@ -12304,6 +12305,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "unindent" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" + [[package]] name = "universal-hash" version = "0.5.1" diff --git a/examples/typegraphs/deno.py b/examples/typegraphs/deno.py index c73e0509ef..d03e1bd6be 100644 --- a/examples/typegraphs/deno.py +++ b/examples/typegraphs/deno.py @@ -19,7 +19,7 @@ def deno(g: Graph): t.struct({"n": t.float()}), t.struct({"res": t.integer(), "ms": t.float()}), code=""" - ({ n }) => { + ({ n }) => { let a = 0, b = 1, c; const start = performance.now(); for ( diff --git a/examples/typegraphs/faas-runner.ts b/examples/typegraphs/faas-runner.ts index 44b090bc63..13de160764 100644 --- a/examples/typegraphs/faas-runner.ts +++ b/examples/typegraphs/faas-runner.ts @@ -5,30 +5,31 @@ import { PythonRuntime } from "@typegraph/sdk/runtimes/python.js"; // skip:end -typegraph({ - name: "faas-runner", - // skip:next-line - cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, -}, (g) => { - const pub = Policy.public(); +typegraph( + { + name: "faas-runner", + // skip:next-line + cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, + }, + (g) => { + const pub = Policy.public(); - const deno = new DenoRuntime(); - const python = new PythonRuntime(); + const deno = new DenoRuntime(); + const python = new PythonRuntime(); - const inp = t.struct({ "n": t.integer({ min: 0, max: 100 }) }); - const out = t.integer(); + const inp = t.struct({ n: t.integer({ min: 0, max: 100 }) }); + const out = t.integer(); - g.expose({ - pycumsum: python.fromLambda(inp, out, { - code: `lambda inp: sum(range(inp["n"]))`, - }), - tscumsum: deno.func( - inp, - out, + g.expose( { - code: - "({n}) => Array.from(Array(5).keys()).reduce((sum, e) => sum + e, 0)", + pycumsum: python.fromLambda(inp, out, { + code: `lambda inp: sum(range(inp['n']))`, + }), + tscumsum: deno.func(inp, out, { + code: "({n}) => Array.from(Array(5).keys()).reduce((sum, e) => sum + e, 0)", + }), }, - ), - }, pub); -}); + pub, + ); + }, +); diff --git a/examples/typegraphs/http-runtime.ts b/examples/typegraphs/http-runtime.ts index ec788fbfac..2009762436 100644 --- a/examples/typegraphs/http-runtime.ts +++ b/examples/typegraphs/http-runtime.ts @@ -8,7 +8,7 @@ import { HttpRuntime } from "@typegraph/sdk/runtimes/http.js"; await typegraph( { - name: "http-example", + name: "http-runtime", // skip:next-line cors: { allowOrigin: ["https://metatype.dev", "http://localhost:3000"] }, }, @@ -33,7 +33,7 @@ await typegraph( }), { path: "/random", - } + }, ), facts_as_text: facts.get( t.struct({ @@ -41,10 +41,10 @@ await typegraph( language: t.enum_(["en", "de"]), }), t.string(), - { path: "/random", headerPrefix: "header_" } + { path: "/random", headerPrefix: "header_" }, ), }, - pub + pub, ); - 
} + }, ); diff --git a/examples/typegraphs/metagen-py.py b/examples/typegraphs/metagen-py.py new file mode 100644 index 0000000000..de69d7baf1 --- /dev/null +++ b/examples/typegraphs/metagen-py.py @@ -0,0 +1,34 @@ +# skip:start +from typegraph import typegraph, Policy, t, Graph +from typegraph.runtimes.python import PythonRuntime +from typegraph.graph.params import Cors +# skip:end + + +@typegraph( + # skip:start + cors=Cors(allow_origin=["https://metatype.dev", "http://localhost:3000"]), + # skip:end +) +def metagen_py(g: Graph): + idv3 = t.struct( + { + "title": t.string(), + "artist": t.string(), + "releaseTime": t.datetime(), + "mp3Url": t.uri(), + } + ).rename("idv3") + + python = PythonRuntime() + + g.expose( + Policy.public(), + remix=python.import_( + idv3, + idv3, + module="./metagen/py/remix.py", + deps=["./metagen/py/remix_types.py"], + name="remix_track", + ).rename("remix_track"), + ) diff --git a/examples/typegraphs/quick-start-project.py b/examples/typegraphs/quick-start-project.py index b0a0084765..7339e1c806 100644 --- a/examples/typegraphs/quick-start-project.py +++ b/examples/typegraphs/quick-start-project.py @@ -1,7 +1,7 @@ from typegraph import Graph, Policy, t, typegraph from typegraph.graph.params import Cors from typegraph.providers.prisma import PrismaRuntime -from typegraph.runtimes import PythonRuntime +from typegraph.runtimes import PythonRuntime, DenoRuntime @typegraph( @@ -9,11 +9,12 @@ cors=Cors(allow_origin=["https://metatype.dev", "http://localhost:3000"]), # skip:end ) -def example(g: Graph): +def quick_start_project(g: Graph): # access control public = Policy.public() # runtimes + deno = DenoRuntime() python = PythonRuntime() db = PrismaRuntime("database", "POSTGRES") @@ -28,6 +29,11 @@ def example(g: Graph): ) # custom functions + add = deno.func( + t.struct({"first": t.float(), "second": t.float()}), + t.float(), + code="({first, second}) => first + second", + ) hello = python.from_lambda( t.struct({"world": t.string()}), t.string(), @@ -37,6 +43,7 @@ def example(g: Graph): # expose endpoints g.expose( public, + add=add, hello=hello, create_message=db.create(message), list_messages=db.find_many(message), diff --git a/examples/typegraphs/quick-start-project.ts b/examples/typegraphs/quick-start-project.ts index 676fcdfcd5..63a089e2c8 100644 --- a/examples/typegraphs/quick-start-project.ts +++ b/examples/typegraphs/quick-start-project.ts @@ -17,31 +17,36 @@ typegraph( const python = new PythonRuntime(); const db = new PrismaRuntime("database", "POSTGRES"); - // database tables + // types, database tables const message = t.struct( { id: t.integer({}, { asId: true, config: { auto: true } }), // configuring our primary key title: t.string(), body: t.string(), }, - { name: "message" } // the name of our type + { name: "message" }, // the name of our type ); - g.expose({ - add: python - .fromLambda( - t.struct({ first: t.float(), second: t.float() }), - t.float(), - { code: "lambda x: x['first'] + x['second']" } - ) - .withPolicy(pub), - multiply: deno - .func(t.struct({ first: t.float(), second: t.float() }), t.float(), { - code: "({first, second}) => first * second", - }) - .withPolicy(pub), - create_message: db.create(message).withPolicy(pub), - list_messages: db.findMany(message).withPolicy(pub), - }); - } + // custom functions + const add = deno.func( + t.struct({ first: t.float(), second: t.float() }), + t.float(), + { code: "({first, second}) => first + second" }, + ); + const hello = python.fromLambda( + t.struct({ world: t.string() }), + t.string(), + 
{ code: `lambda x: f"Hello {x['world']}!"` }, + ); + + g.expose( + { + add, + hello, + create_message: db.create(message), + list_messages: db.findMany(message), + }, + pub, + ); + }, ); diff --git a/libs/common/src/archive.rs b/libs/common/src/archive.rs index 53abe2fa6b..1aa5f8f5e2 100644 --- a/libs/common/src/archive.rs +++ b/libs/common/src/archive.rs @@ -7,6 +7,7 @@ use flate2::{read::GzDecoder, write::GzEncoder, Compression}; use ignore::{Walk, WalkBuilder}; use indexmap::IndexMap; use std::{ + collections::BTreeMap, fs, io::Read, path::{Path, PathBuf}, @@ -110,15 +111,13 @@ pub fn archive_entries(dir_walker: Walk, prefix: Option<&Path>) -> Result { TaskManager::::create(move |ctx| { let addr = ctx.address(); - { - let addr = addr.downgrade(); - ctrlc::set_handler(move || { - debug!("CTRL-C handler"); - if let Some(addr) = addr.upgrade() { - addr.do_send(Stop); - } else { - std::process::exit(1); - } - }) - .unwrap_or_log(); - } + set_stop_recipient(addr.clone().recipient().downgrade()); let task_generator = TaskGenerator { next_task_id: Arc::new(AtomicUsize::new(1)), diff --git a/meta-cli/src/deploy/actors/task_manager/retry_manager.rs b/meta-cli/src/deploy/actors/task_manager/retry_manager.rs deleted file mode 100644 index 23d9e15ccf..0000000000 --- a/meta-cli/src/deploy/actors/task_manager/retry_manager.rs +++ /dev/null @@ -1,2 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 diff --git a/meta-cli/src/deploy/actors/task_manager/signal_handler.rs b/meta-cli/src/deploy/actors/task_manager/signal_handler.rs new file mode 100644 index 0000000000..514f10bcfd --- /dev/null +++ b/meta-cli/src/deploy/actors/task_manager/signal_handler.rs @@ -0,0 +1,44 @@ +// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. +// SPDX-License-Identifier: MPL-2.0 + +use crate::interlude::*; +use actix::WeakRecipient; +use once_cell::sync::Lazy; +use std::sync::Mutex; + +struct CtrlCHandlerData { + stop_recipient: WeakRecipient, +} + +static CTRLC_HANDLER_DATA: Lazy>> = Lazy::new(|| Mutex::new(None)); + +pub fn set_stop_recipient(recipient: WeakRecipient) { + let mut data = CTRLC_HANDLER_DATA.lock().unwrap(); + if let Some(data) = data.as_mut() { + data.stop_recipient = recipient; + } else { + debug!("setting ctrlc handler"); + *data = Some(CtrlCHandlerData { + stop_recipient: recipient, + }); + let res = ctrlc::set_handler(ctrlc_handler); + + #[cfg(debug_assertions)] + res.unwrap_or_log(); + + #[cfg(not(debug_assertions))] + if let Err(e) = res { + error!("failed to set ctrlc handler: {}", e); + } + } +} + +fn ctrlc_handler() { + let data = CTRLC_HANDLER_DATA.lock().unwrap(); + if let Some(data) = data.as_ref() { + if let Some(stop_recipient) = data.stop_recipient.upgrade() { + stop_recipient.do_send(super::message::Stop); + } + } + // else?? +} diff --git a/typegate/tests/metagen/__snapshots__/metagen_test.ts.snap b/typegate/tests/metagen/__snapshots__/metagen_test.ts.snap index 4e289c9d1c..1d3548417c 100644 --- a/typegate/tests/metagen/__snapshots__/metagen_test.ts.snap +++ b/typegate/tests/metagen/__snapshots__/metagen_test.ts.snap @@ -4,176 +4,6 @@ snapshot[`Metagen within sdk 1`] = ` [ [ "0", - { - content: " -mod mdk; -pub use mdk::*; - -/* -init_mat! 
{ - hook: || { - // initialize global stuff here if you need it - MatBuilder::new() - // register function handlers here - .register_handler(stubs::MyFunc::erased(MyMat)) - } -} - -struct MyMat; - -// FIXME: use actual types from your mdk here -impl stubs::MyFunc for MyMat { - fn handle(&self, input: types::MyFuncIn, _cx: Ctx) -> anyhow::Result { - unimplemented!() - } -} -*/ -", - overwrite: false, - path: "./workspace/some/base/path/rust/lib.rs", - }, - ], - [ - "1", - { - content: 'from types import NoneType -from typing import Callable, List, Union, get_origin, ForwardRef, Any -from dataclasses import dataclass, asdict, fields - -FORWARD_REFS = {} - -class Struct(): - def repr(self): - return asdict(self) - - @staticmethod - def try_new(dt_class, val: Any): - # Object - ftypes = {f.name: f.type for f in fields(dt_class)} - attrs = {} - for f in val: - fval = val[f] - ftype = ftypes[f] - serialized = False - # Union - if get_origin(ftype) is Union: - try: - attrs[f] = Struct.try_union(ftype.__args__, fval) - serialized = True - except Exception: - pass - # List - elif get_origin(ftype) is list: - try: - attrs[f] = Struct.try_typed_list(ftype.__args__, fval) - serialized = True - except Exception: - pass - # Any - if not serialized: - if isinstance(ftype, str) and ftype in FORWARD_REFS: - klass = FORWARD_REFS[ftype] - attrs[f] = Struct.new(klass, fval) - else: - attrs[f] = Struct.new(ftype, fval) - return dt_class(**attrs) - - @staticmethod - def try_typed_list(tpe: Any, items: Any): - hint = tpe.__args__[0] - klass = ( - FORWARD_REFS[hint.__forward_arg__] if isinstance(hint, ForwardRef) else hint - ) - return [Struct.new(klass, v) for v in items] - - @staticmethod - def try_union(variants: List[Any], val: Any): - errors = [] - for variant in variants: - try: - if variant is NoneType: - if val is None: - return None - else: - continue - if get_origin(variant) is list: - if isinstance(val, list): - return Struct.try_typed_list(variant, val) - else: - continue - klass = FORWARD_REFS[variant.__forward_arg__] - return Struct.try_new(klass, val) - except Exception as e: - errors.append(str(e)) - raise Exception("\\\\n".join(errors)) - - @staticmethod - def new(dt_class: Any, val: Any): - try: - return Struct.try_new(dt_class, val) - except Exception: - return val - -@dataclass -class Object7(Struct): - name: str - - - -FORWARD_REFS["Object7"] = Object7 -@dataclass -class Student(Struct): - id: int - name: str - peers: Union[List["Student"], None] - - - -FORWARD_REFS["Student"] = Student -@dataclass -class TwoInput(Struct): - name: str - - - -FORWARD_REFS["TwoInput"] = TwoInput -Type8Student = List["Student"] -TypeString6 = str - - -def __repr(value: Any): - if isinstance(value, Struct): - return value.repr() - return value - - -def typed_fnOne(user_fn: Callable[[Object7], Type8Student]): - def exported_wrapper(raw_inp): - inp: Object7 = Struct.new(Object7, raw_inp) - out: Type8Student = user_fn(inp) - if isinstance(out, list): - return [__repr(v) for v in out] - return __repr(out) - return exported_wrapper - - -def typed_fnTwo(user_fn: Callable[[TwoInput], TypeString6]): - def exported_wrapper(raw_inp): - inp: TwoInput = Struct.new(TwoInput, raw_inp) - out: TypeString6 = user_fn(inp) - if isinstance(out, list): - return [__repr(v) for v in out] - return __repr(out) - return exported_wrapper - - -', - overwrite: true, - path: "./workspace/some/base/path/python/same_hit_types.py", - }, - ], - [ - "2", { content: '// This file was @generated by metagen and is intended // to be generated again on 
subsequent metagen runs. @@ -234,6 +64,51 @@ export type Func20Handler = Handler; path: "./workspace/some/base/path/ts/mdk.ts", }, ], + [ + "1", + { + content: '# - NOTE: only modules that are imported relatively +# are supported. I.e. prefixed by \`.\` or \`..\` +# - Make sure to include any module imports in the \`deps\` +# array when using external modules with PythonRuntime +from .same_hit_types import Object7, TwoInput, TypeString6, typed_fnOne, typed_fnTwo, Type8Student + + +@typed_fnTwo +def fnTwo(inp: TwoInput) -> TypeString6: + # TODO: write your logic here + raise Exception("fnTwo not implemented") + +@typed_fnOne +def fnOne(inp: Object7) -> Type8Student: + # TODO: write your logic here + raise Exception("fnOne not implemented") + +', + overwrite: false, + path: "./workspace/some/base/path/python/same_hit.py", + }, + ], + [ + "2", + { + content: '# - NOTE: only modules that are imported relatively +# are supported. I.e. prefixed by \`.\` or \`..\` +# - Make sure to include any module imports in the \`deps\` +# array when using external modules with PythonRuntime +from .other_types import typed_three, Student, Object7 + + +@typed_three +def three(inp: Object7) -> Student: + # TODO: write your logic here + raise Exception("three not implemented") + +', + overwrite: false, + path: "./workspace/some/base/path/python/other.py", + }, + ], [ "3", { @@ -331,6 +206,15 @@ class Student(Struct): FORWARD_REFS["Student"] = Student +@dataclass +class TwoInput(Struct): + name: str + + + +FORWARD_REFS["TwoInput"] = TwoInput +Type8Student = List["Student"] +TypeString6 = str def __repr(value: Any): @@ -339,10 +223,20 @@ def __repr(value: Any): return value -def typed_three(user_fn: Callable[[Object7], Student]): +def typed_fnTwo(user_fn: Callable[[TwoInput], TypeString6]): + def exported_wrapper(raw_inp): + inp: TwoInput = Struct.new(TwoInput, raw_inp) + out: TypeString6 = user_fn(inp) + if isinstance(out, list): + return [__repr(v) for v in out] + return __repr(out) + return exported_wrapper + + +def typed_fnOne(user_fn: Callable[[Object7], Type8Student]): def exported_wrapper(raw_inp): inp: Object7 = Struct.new(Object7, raw_inp) - out: Student = user_fn(inp) + out: Type8Student = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) @@ -351,56 +245,11 @@ def typed_three(user_fn: Callable[[Object7], Student]): ', overwrite: true, - path: "./workspace/some/base/path/python/other_types.py", + path: "./workspace/some/base/path/python/same_hit_types.py", }, ], [ "4", - { - content: '# - NOTE: only modules that are imported relatively -# are supported. I.e. 
prefixed by \`.\` or \`..\` -# - Make sure to include any module imports in the \`deps\` -# array when using external modules with PythonRuntime -from .other_types import typed_three, Object7, Student - - -@typed_three -def three(inp: Object7) -> Student: - # TODO: write your logic here - raise Exception("three not implemented") - -', - overwrite: false, - path: "./workspace/some/base/path/python/other.py", - }, - ], - [ - "5", - { - content: 'package.name = "example_metagen_mdk" -package.edition = "2021" -package.version = "0.0.1" - -[lib] -path = "lib.rs" -crate-type = ["cdylib", "rlib"] - -[dependencies] -anyhow = "1.0.86" -serde = { version = "1.0.203", features = ["derive"] } -serde_json = "1.0.117" -wit-bindgen = "0.26.0" - - -[profile.release] -strip = "symbols" -opt-level = "z"', - overwrite: false, - path: "./workspace/some/base/path/rust/Cargo.toml", - }, - ], - [ - "6", { content: \`// This file was @generated by metagen and is intended // to be generated again on subsequent metagen runs. @@ -717,68 +566,7 @@ pub mod stubs { }, ], [ - "7", - { - content: '# - NOTE: only modules that are imported relatively -# are supported. I.e. prefixed by \`.\` or \`..\` -# - Make sure to include any module imports in the \`deps\` -# array when using external modules with PythonRuntime -from .same_hit_types import Type8Student, Object7, typed_fnOne, TwoInput, TypeString6, typed_fnTwo - - -@typed_fnOne -def fnOne(inp: Object7) -> Type8Student: - # TODO: write your logic here - raise Exception("fnOne not implemented") - -@typed_fnTwo -def fnTwo(inp: TwoInput) -> TypeString6: - # TODO: write your logic here - raise Exception("fnTwo not implemented") - -', - overwrite: false, - path: "./workspace/some/base/path/python/same_hit.py", - }, - ], -] -`; - -snapshot[`Metagen within sdk 2`] = ` -[ - [ - "0", - { - content: " -mod mdk; -pub use mdk::*; - -/* -init_mat! 
{ - hook: || { - // initialize global stuff here if you need it - MatBuilder::new() - // register function handlers here - .register_handler(stubs::MyFunc::erased(MyMat)) - } -} - -struct MyMat; - -// FIXME: use actual types from your mdk here -impl stubs::MyFunc for MyMat { - fn handle(&self, input: types::MyFuncIn, _cx: Ctx) -> anyhow::Result { - unimplemented!() - } -} -*/ -", - overwrite: false, - path: "./workspace/some/base/path/rust/lib.rs", - }, - ], - [ - "1", + "5", { content: 'from types import NoneType from typing import Callable, List, Union, get_origin, ForwardRef, Any @@ -874,15 +662,6 @@ class Student(Struct): FORWARD_REFS["Student"] = Student -@dataclass -class TwoInput(Struct): - name: str - - - -FORWARD_REFS["TwoInput"] = TwoInput -Type8Student = List["Student"] -TypeString6 = str def __repr(value: Any): @@ -891,20 +670,10 @@ def __repr(value: Any): return value -def typed_fnOne(user_fn: Callable[[Object7], Type8Student]): +def typed_three(user_fn: Callable[[Object7], Student]): def exported_wrapper(raw_inp): inp: Object7 = Struct.new(Object7, raw_inp) - out: Type8Student = user_fn(inp) - if isinstance(out, list): - return [__repr(v) for v in out] - return __repr(out) - return exported_wrapper - - -def typed_fnTwo(user_fn: Callable[[TwoInput], TypeString6]): - def exported_wrapper(raw_inp): - inp: TwoInput = Struct.new(TwoInput, raw_inp) - out: TypeString6 = user_fn(inp) + out: Student = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) @@ -913,11 +682,72 @@ def typed_fnTwo(user_fn: Callable[[TwoInput], TypeString6]): ', overwrite: true, - path: "./workspace/some/base/path/python/same_hit_types.py", + path: "./workspace/some/base/path/python/other_types.py", }, ], [ - "2", + "6", + { + content: " +mod mdk; +pub use mdk::*; + +/* +init_mat! { + hook: || { + // initialize global stuff here if you need it + MatBuilder::new() + // register function handlers here + .register_handler(stubs::MyFunc::erased(MyMat)) + } +} + +struct MyMat; + +// FIXME: use actual types from your mdk here +impl stubs::MyFunc for MyMat { + fn handle(&self, input: types::MyFuncIn, _cx: Ctx) -> anyhow::Result { + unimplemented!() + } +} +*/ +", + overwrite: false, + path: "./workspace/some/base/path/rust/lib.rs", + }, + ], + [ + "7", + { + content: 'package.name = "example_metagen_mdk" +package.edition = "2021" +package.version = "0.0.1" + +[lib] +path = "lib.rs" +crate-type = ["cdylib", "rlib"] + +[dependencies] +anyhow = "1.0.86" +serde = { version = "1.0.203", features = ["derive"] } +serde_json = "1.0.117" +wit-bindgen = "0.26.0" + + +[profile.release] +strip = "symbols" +opt-level = "z"', + overwrite: false, + path: "./workspace/some/base/path/rust/Cargo.toml", + }, + ], +] +`; + +snapshot[`Metagen within sdk 2`] = ` +[ + [ + "0", { content: '// This file was @generated by metagen and is intended // to be generated again on subsequent metagen runs. @@ -970,12 +800,57 @@ export type TwoInput = { }; -export type Func18Handler = Handler; -export type Func19Handler = Handler; -export type Func20Handler = Handler; +export type Func18Handler = Handler; +export type Func19Handler = Handler; +export type Func20Handler = Handler; +', + overwrite: true, + path: "./workspace/some/base/path/ts/mdk.ts", + }, + ], + [ + "1", + { + content: '# - NOTE: only modules that are imported relatively +# are supported. I.e. 
prefixed by \`.\` or \`..\` +# - Make sure to include any module imports in the \`deps\` +# array when using external modules with PythonRuntime +from .same_hit_types import Object7, TwoInput, TypeString6, typed_fnOne, typed_fnTwo, Type8Student + + +@typed_fnTwo +def fnTwo(inp: TwoInput) -> TypeString6: + # TODO: write your logic here + raise Exception("fnTwo not implemented") + +@typed_fnOne +def fnOne(inp: Object7) -> Type8Student: + # TODO: write your logic here + raise Exception("fnOne not implemented") + +', + overwrite: false, + path: "./workspace/some/base/path/python/same_hit.py", + }, + ], + [ + "2", + { + content: '# - NOTE: only modules that are imported relatively +# are supported. I.e. prefixed by \`.\` or \`..\` +# - Make sure to include any module imports in the \`deps\` +# array when using external modules with PythonRuntime +from .other_types import typed_three, Student, Object7 + + +@typed_three +def three(inp: Object7) -> Student: + # TODO: write your logic here + raise Exception("three not implemented") + ', - overwrite: true, - path: "./workspace/some/base/path/ts/mdk.ts", + overwrite: false, + path: "./workspace/some/base/path/python/other.py", }, ], [ @@ -1075,6 +950,15 @@ class Student(Struct): FORWARD_REFS["Student"] = Student +@dataclass +class TwoInput(Struct): + name: str + + + +FORWARD_REFS["TwoInput"] = TwoInput +Type8Student = List["Student"] +TypeString6 = str def __repr(value: Any): @@ -1083,10 +967,20 @@ def __repr(value: Any): return value -def typed_three(user_fn: Callable[[Object7], Student]): +def typed_fnTwo(user_fn: Callable[[TwoInput], TypeString6]): + def exported_wrapper(raw_inp): + inp: TwoInput = Struct.new(TwoInput, raw_inp) + out: TypeString6 = user_fn(inp) + if isinstance(out, list): + return [__repr(v) for v in out] + return __repr(out) + return exported_wrapper + + +def typed_fnOne(user_fn: Callable[[Object7], Type8Student]): def exported_wrapper(raw_inp): inp: Object7 = Struct.new(Object7, raw_inp) - out: Student = user_fn(inp) + out: Type8Student = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) @@ -1095,56 +989,11 @@ def typed_three(user_fn: Callable[[Object7], Student]): ', overwrite: true, - path: "./workspace/some/base/path/python/other_types.py", + path: "./workspace/some/base/path/python/same_hit_types.py", }, ], [ "4", - { - content: '# - NOTE: only modules that are imported relatively -# are supported. I.e. prefixed by \`.\` or \`..\` -# - Make sure to include any module imports in the \`deps\` -# array when using external modules with PythonRuntime -from .other_types import typed_three, Object7, Student - - -@typed_three -def three(inp: Object7) -> Student: - # TODO: write your logic here - raise Exception("three not implemented") - -', - overwrite: false, - path: "./workspace/some/base/path/python/other.py", - }, - ], - [ - "5", - { - content: 'package.name = "example_metagen_mdk" -package.edition = "2021" -package.version = "0.0.1" - -[lib] -path = "lib.rs" -crate-type = ["cdylib", "rlib"] - -[dependencies] -anyhow = "1.0.86" -serde = { version = "1.0.203", features = ["derive"] } -serde_json = "1.0.117" -wit-bindgen = "0.26.0" - - -[profile.release] -strip = "symbols" -opt-level = "z"', - overwrite: false, - path: "./workspace/some/base/path/rust/Cargo.toml", - }, - ], - [ - "6", { content: \`// This file was @generated by metagen and is intended // to be generated again on subsequent metagen runs. 
@@ -1461,28 +1310,179 @@ pub mod stubs { }, ], [ - "7", + "5", { - content: '# - NOTE: only modules that are imported relatively -# are supported. I.e. prefixed by \`.\` or \`..\` -# - Make sure to include any module imports in the \`deps\` -# array when using external modules with PythonRuntime -from .same_hit_types import Type8Student, Object7, typed_fnOne, TwoInput, TypeString6, typed_fnTwo + content: 'from types import NoneType +from typing import Callable, List, Union, get_origin, ForwardRef, Any +from dataclasses import dataclass, asdict, fields +FORWARD_REFS = {} -@typed_fnOne -def fnOne(inp: Object7) -> Type8Student: - # TODO: write your logic here - raise Exception("fnOne not implemented") +class Struct(): + def repr(self): + return asdict(self) + + @staticmethod + def try_new(dt_class, val: Any): + # Object + ftypes = {f.name: f.type for f in fields(dt_class)} + attrs = {} + for f in val: + fval = val[f] + ftype = ftypes[f] + serialized = False + # Union + if get_origin(ftype) is Union: + try: + attrs[f] = Struct.try_union(ftype.__args__, fval) + serialized = True + except Exception: + pass + # List + elif get_origin(ftype) is list: + try: + attrs[f] = Struct.try_typed_list(ftype.__args__, fval) + serialized = True + except Exception: + pass + # Any + if not serialized: + if isinstance(ftype, str) and ftype in FORWARD_REFS: + klass = FORWARD_REFS[ftype] + attrs[f] = Struct.new(klass, fval) + else: + attrs[f] = Struct.new(ftype, fval) + return dt_class(**attrs) + + @staticmethod + def try_typed_list(tpe: Any, items: Any): + hint = tpe.__args__[0] + klass = ( + FORWARD_REFS[hint.__forward_arg__] if isinstance(hint, ForwardRef) else hint + ) + return [Struct.new(klass, v) for v in items] + + @staticmethod + def try_union(variants: List[Any], val: Any): + errors = [] + for variant in variants: + try: + if variant is NoneType: + if val is None: + return None + else: + continue + if get_origin(variant) is list: + if isinstance(val, list): + return Struct.try_typed_list(variant, val) + else: + continue + klass = FORWARD_REFS[variant.__forward_arg__] + return Struct.try_new(klass, val) + except Exception as e: + errors.append(str(e)) + raise Exception("\\\\n".join(errors)) + + @staticmethod + def new(dt_class: Any, val: Any): + try: + return Struct.try_new(dt_class, val) + except Exception: + return val + +@dataclass +class Object7(Struct): + name: str + + + +FORWARD_REFS["Object7"] = Object7 +@dataclass +class Student(Struct): + id: int + name: str + peers: Union[List["Student"], None] + + + +FORWARD_REFS["Student"] = Student + + +def __repr(value: Any): + if isinstance(value, Struct): + return value.repr() + return value + + +def typed_three(user_fn: Callable[[Object7], Student]): + def exported_wrapper(raw_inp): + inp: Object7 = Struct.new(Object7, raw_inp) + out: Student = user_fn(inp) + if isinstance(out, list): + return [__repr(v) for v in out] + return __repr(out) + return exported_wrapper -@typed_fnTwo -def fnTwo(inp: TwoInput) -> TypeString6: - # TODO: write your logic here - raise Exception("fnTwo not implemented") ', + overwrite: true, + path: "./workspace/some/base/path/python/other_types.py", + }, + ], + [ + "6", + { + content: " +mod mdk; +pub use mdk::*; + +/* +init_mat! 
{ + hook: || { + // initialize global stuff here if you need it + MatBuilder::new() + // register function handlers here + .register_handler(stubs::MyFunc::erased(MyMat)) + } +} + +struct MyMat; + +// FIXME: use actual types from your mdk here +impl stubs::MyFunc for MyMat { + fn handle(&self, input: types::MyFuncIn, _cx: Ctx) -> anyhow::Result { + unimplemented!() + } +} +*/ +", overwrite: false, - path: "./workspace/some/base/path/python/same_hit.py", + path: "./workspace/some/base/path/rust/lib.rs", + }, + ], + [ + "7", + { + content: 'package.name = "example_metagen_mdk" +package.edition = "2021" +package.version = "0.0.1" + +[lib] +path = "lib.rs" +crate-type = ["cdylib", "rlib"] + +[dependencies] +anyhow = "1.0.86" +serde = { version = "1.0.203", features = ["derive"] } +serde_json = "1.0.117" +wit-bindgen = "0.26.0" + + +[profile.release] +strip = "symbols" +opt-level = "z"', + overwrite: false, + path: "./workspace/some/base/path/rust/Cargo.toml", }, ], ] diff --git a/typegate/tests/metagen/metagen_test.ts b/typegate/tests/metagen/metagen_test.ts index 306fa7e7bd..4808d293e1 100644 --- a/typegate/tests/metagen/metagen_test.ts +++ b/typegate/tests/metagen/metagen_test.ts @@ -3,17 +3,18 @@ import { Meta } from "../utils/mod.ts"; import { join } from "std/path/join.ts"; +import { resolve } from "std/path/resolve.ts"; import { assertEquals } from "std/assert/mod.ts"; import { GraphQLQuery } from "../utils/query/graphql_query.ts"; import { JSONValue } from "../../src/utils.ts"; +import { testDir } from "../utils/dir.ts"; + +const denoJson = resolve(testDir, "../deno.jsonc"); Meta.test("metagen rust builds", async (t) => { const tmpDir = t.tempDir; - const typegraphPath = join( - import.meta.dirname!, - "./typegraphs/metagen.mjs", - ); + const typegraphPath = join(import.meta.dirname!, "./typegraphs/metagen.mjs"); const genCratePath = join(tmpDir, "mdk"); await Deno.writeTextFile( @@ -46,27 +47,31 @@ members = ["mdk/"] `, ); assertEquals( - (await Meta.cli({ - env: { - MCLI_LOADER_CMD: "deno run -A --config ../deno.jsonc", - RUST_BACKTRACE: "1", - }, - }, ...`-C ${tmpDir} gen`.split(" "))).code, + ( + await Meta.cli( + { + env: { + MCLI_LOADER_CMD: `deno run -A --config ${denoJson}`, + RUST_BACKTRACE: "1", + }, + }, + ...`-C ${tmpDir} gen`.split(" "), + ) + ).code, 0, ); assertEquals( - (await t.shell("cargo build --target wasm32-wasi".split(" "), { - currentDir: genCratePath, - })).code, + ( + await t.shell("cargo build --target wasm32-wasi".split(" "), { + currentDir: genCratePath, + }) + ).code, 0, ); }); Meta.test("metagen python runs on cyclic types", async (t) => { - const typegraphPath = join( - import.meta.dirname!, - "typegraphs/python.py", - ); + const typegraphPath = join(import.meta.dirname!, "typegraphs/python.py"); const basePath = join(t.tempDir, "mdk"); Deno.writeTextFile( @@ -88,8 +93,7 @@ metagen: ); assertEquals( - (await Meta.cli({}, ...`-C ${t.tempDir} gen my_target`.split(" "))) - .code, + (await Meta.cli({}, ...`-C ${t.tempDir} gen my_target`.split(" "))).code, 0, ); }); @@ -138,10 +142,7 @@ Meta.test("Metagen within sdk", async (t) => { }); await t.should("Run metagen within python", async () => { - const typegraphPath = join( - import.meta.dirname!, - "./typegraphs/metagen.py", - ); + const typegraphPath = join(import.meta.dirname!, "./typegraphs/metagen.py"); const command = new Deno.Command("python3", { args: [typegraphPath], env: { @@ -156,7 +157,8 @@ Meta.test("Metagen within sdk", async (t) => { const child = command.spawn(); const output = await 
child.output(); if (output.success) { - const generated = JSON.parse(new TextDecoder().decode(output.stdout)); + const stdout = new TextDecoder().decode(output.stdout); + const generated = JSON.parse(stdout); await t.assertSnapshot( Object.entries(generated).sort(([keyA], [keyB]) => keyA.localeCompare(keyB) @@ -179,20 +181,22 @@ Meta.test("Metagen within sdk", async (t) => { }); Meta.test("metagen table suite", async (metaTest) => { - const scriptsPath = join( - import.meta.dirname!, - "typegraphs/identities", - ); + const scriptsPath = join(import.meta.dirname!, "typegraphs/identities"); const genCratePath = join(scriptsPath, "rs"); // const genPyPath = join(scriptsPath, "py"); // const genTsPath = join(scriptsPath, "ts"); assertEquals( - (await Meta.cli({ - env: { - // RUST_BACKTRACE: "1", - }, - }, ...`-C ${scriptsPath} gen`.split(" "))).code, + ( + await Meta.cli( + { + env: { + // RUST_BACKTRACE: "1", + }, + }, + ...`-C ${scriptsPath} gen`.split(" "), + ) + ).code, 0, ); const compositesQuery = `query ($data: composites) { @@ -204,7 +208,7 @@ Meta.test("metagen table suite", async (metaTest) => { ... on branch2 { branch2 } - ... on primitives{ + ... on primitives{ str enum uuid @@ -274,18 +278,25 @@ Meta.test("metagen table suite", async (metaTest) => { } }`, vars: { - data: { // cycles 1 - to2: { //cycles 2 + data: { + // cycles 1 + to2: { + //cycles 2 phantom3a: "phantom", - to1: { // cycles2/variant cycle3 + to1: { + // cycles2/variant cycle3 // cycles1 - list3: [{ //cycles3 - to2: { // cycles2 - // cycles2/variant cycles1 - to2: null, - phantom1: "phantom", + list3: [ + { + //cycles3 + to2: { + // cycles2 + // cycles2/variant cycles1 + to2: null, + phantom1: "phantom", + }, }, - }], + ], }, }, } as Record, @@ -398,18 +409,18 @@ Meta.test("metagen table suite", async (metaTest) => { await metaTest.should("build rust crate", async () => { assertEquals( - (await metaTest.shell("bash build.sh".split(" "), { - currentDir: genCratePath, - })).code, + ( + await metaTest.shell("bash build.sh".split(" "), { + currentDir: genCratePath, + }) + ).code, 0, ); }); await using engine = await metaTest.engine( "metagen/typegraphs/identities.py", ); - for ( - const prefix of ["rs", "ts", "py"] - ) { + for (const prefix of ["rs", "ts", "py"]) { await metaTest.should(`mdk data go round ${prefix}`, async (t) => { for (const { name, vars, query, skip } of cases) { if (skip) { @@ -424,9 +435,8 @@ Meta.test("metagen table suite", async (metaTest) => { [], ) .withVars(vars) - .expectData( - vars, - ).on(engine); + .expectData(vars) + .on(engine); }); } }); diff --git a/typegate/tests/metagen/typegraphs/identities/py/handlers_types.py b/typegate/tests/metagen/typegraphs/identities/py/handlers_types.py index 5edcc04262..04033ee4fa 100644 --- a/typegate/tests/metagen/typegraphs/identities/py/handlers_types.py +++ b/typegate/tests/metagen/typegraphs/identities/py/handlers_types.py @@ -117,7 +117,7 @@ class CompositesArgs(Struct): class Composites(Struct): opt: Union[str, None] either: Union["Primitives", "Branch2"] - union: Union[List[str], str, int] + union: Union[int, List[str], str] list: List[str] @@ -143,7 +143,7 @@ class Cycles1Args(Struct): @dataclass class Cycles1(Struct): phantom1: Union[str, None] - to2: Union[Union[Union["Branch33A", "Branch33B"], "Cycles1"], None] + to2: Union[Union[Union["Branch33B", "Branch33A"], "Cycles1"], None] list3: Union[List[Union["Branch33A", "Branch33B"]], None] @@ -162,7 +162,7 @@ class Branch33A(Struct): @dataclass class Branch33B(Struct): phantom3b: Union[str, None] 
- to2: Union[Union[Union["Branch33A", "Branch33B"], "Cycles1"], None] + to2: Union[Union["Cycles1", Union["Branch33B", "Branch33A"]], None] FORWARD_REFS["Branch33B"] = Branch33B @@ -209,10 +209,10 @@ def __repr(value: Any): return value -def typed_simple_cycles(user_fn: Callable[[SimpleCycles1Args], SimpleCycles1]): +def typed_primitives(user_fn: Callable[[PrimitivesArgs], Primitives]): def exported_wrapper(raw_inp): - inp: SimpleCycles1Args = Struct.new(SimpleCycles1Args, raw_inp) - out: SimpleCycles1 = user_fn(inp) + inp: PrimitivesArgs = Struct.new(PrimitivesArgs, raw_inp) + out: Primitives = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) @@ -220,10 +220,10 @@ def exported_wrapper(raw_inp): return exported_wrapper -def typed_cycles(user_fn: Callable[[Cycles1Args], Cycles1]): +def typed_simple_cycles(user_fn: Callable[[SimpleCycles1Args], SimpleCycles1]): def exported_wrapper(raw_inp): - inp: Cycles1Args = Struct.new(Cycles1Args, raw_inp) - out: Cycles1 = user_fn(inp) + inp: SimpleCycles1Args = Struct.new(SimpleCycles1Args, raw_inp) + out: SimpleCycles1 = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) @@ -231,10 +231,10 @@ def exported_wrapper(raw_inp): return exported_wrapper -def typed_primitives(user_fn: Callable[[PrimitivesArgs], Primitives]): +def typed_composites(user_fn: Callable[[CompositesArgs], Composites]): def exported_wrapper(raw_inp): - inp: PrimitivesArgs = Struct.new(PrimitivesArgs, raw_inp) - out: Primitives = user_fn(inp) + inp: CompositesArgs = Struct.new(CompositesArgs, raw_inp) + out: Composites = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) @@ -242,10 +242,10 @@ def exported_wrapper(raw_inp): return exported_wrapper -def typed_composites(user_fn: Callable[[CompositesArgs], Composites]): +def typed_cycles(user_fn: Callable[[Cycles1Args], Cycles1]): def exported_wrapper(raw_inp): - inp: CompositesArgs = Struct.new(CompositesArgs, raw_inp) - out: Composites = user_fn(inp) + inp: Cycles1Args = Struct.new(Cycles1Args, raw_inp) + out: Cycles1 = user_fn(inp) if isinstance(out, list): return [__repr(v) for v in out] return __repr(out) diff --git a/typegate/tests/metagen/typegraphs/metagen.py b/typegate/tests/metagen/typegraphs/metagen.py index 53192e52d2..889734cecc 100644 --- a/typegate/tests/metagen/typegraphs/metagen.py +++ b/typegate/tests/metagen/typegraphs/metagen.py @@ -41,7 +41,7 @@ def example_metagen(g: Graph): ) -tg = example_metagen() +tg = example_metagen workspace_path = getenv("workspace_path") target_name = getenv("target_name") gen_config = json.loads(getenv("gen_config")) diff --git a/typegraph/node/sdk/src/metagen.ts b/typegraph/node/sdk/src/metagen.ts index 357bb3aa0e..2a4b3ab954 100644 --- a/typegraph/node/sdk/src/metagen.ts +++ b/typegraph/node/sdk/src/metagen.ts @@ -4,30 +4,12 @@ import { SerializeParams } from "./gen/interfaces/metatype-typegraph-core.js"; import { TypegraphOutput } from "./typegraph.js"; import { wit_utils } from "./wit.js"; -import { freezeTgOutput, getEnvVariable } from "./utils/func_utils.js"; +import { freezeTgOutput } from "./utils/func_utils.js"; import { MdkConfig, MdkOutput, } from "./gen/interfaces/metatype-typegraph-utils.js"; -const serializeParams = { - // TODO env variable key constants.js - typegraphPath: getEnvVariable("MCLI_TG_PATH")!, - prefix: undefined, - artifactResolution: false, - codegen: true, - prismaMigration: { - migrationsDir: "prisma-migrations", - migrationActions: [], - 
defaultMigrationAction: { - apply: false, - create: false, - reset: false, - }, - }, - pretty: false, -} satisfies SerializeParams; - export class Metagen { constructor( private workspacePath: string, @@ -35,6 +17,22 @@ export class Metagen { ) {} private getMdkConfig(tgOutput: TypegraphOutput, targetName: string) { + const serializeParams = { + typegraphPath: `${this.workspacePath}/tg.ts`, + prefix: undefined, + artifactResolution: false, + codegen: true, + prismaMigration: { + migrationsDir: "prisma-migrations", + migrationActions: [], + defaultMigrationAction: { + apply: false, + create: false, + reset: false, + }, + }, + pretty: false, + } satisfies SerializeParams; const frozenOut = freezeTgOutput(serializeParams, tgOutput); return { configJson: JSON.stringify(this.genConfig), diff --git a/typegraph/python/typegraph/graph/metagen.py b/typegraph/python/typegraph/graph/metagen.py index 7571ea6ff7..bf14cddfbc 100644 --- a/typegraph/python/typegraph/graph/metagen.py +++ b/typegraph/python/typegraph/graph/metagen.py @@ -13,26 +13,6 @@ from typegraph.graph.shared_types import TypegraphOutput from typegraph.utils import freeze_tg_output from typegraph.wit import store, wit_utils -from os import environ as env - -_tg_path = env.get("MCLI_TYPEGRAPH_PATH") -if _tg_path is None: - raise Exception("MCLI_TYPEGRAPH_PATH not set") - -serialize_params = SerializeParams( - typegraph_path=_tg_path, - prefix=None, - artifact_resolution=False, - codegen=True, - prisma_migration=PrismaMigrationConfig( - migrations_dir="prisma-migrations", - migration_actions=[], - default_migration_action=MigrationAction( - apply=False, create=False, reset=False - ), - ), - pretty=False, -) class Metagen: @@ -48,6 +28,21 @@ def _get_mdk_config( tg_output: TypegraphOutput, target_name: str, ) -> MdkConfig: + serialize_params = SerializeParams( + typegraph_path=self.workspace_path + "/tg.py", + prefix=None, + artifact_resolution=False, + codegen=True, + prisma_migration=PrismaMigrationConfig( + migrations_dir="prisma-migrations", + migration_actions=[], + default_migration_action=MigrationAction( + apply=False, create=False, reset=False + ), + ), + pretty=False, + ) + frozen_out = freeze_tg_output(serialize_params, tg_output) return MdkConfig( tg_json=frozen_out.serialize(serialize_params).tgJson, diff --git a/typegraph/python/typegraph/graph/tg_manage.py b/typegraph/python/typegraph/graph/tg_manage.py index cab068d355..64ae695ed8 100644 --- a/typegraph/python/typegraph/graph/tg_manage.py +++ b/typegraph/python/typegraph/graph/tg_manage.py @@ -45,7 +45,7 @@ def serialize(self): params = SerializeParams( typegraph_path=env.typegraph_path, prefix=env.prefix, - artifact_resolution=True, + artifact_resolution=env.artifact_resolution, codegen=False, prisma_migration=PrismaMigrationConfig( migrations_dir=self.get_migrations_dir(), From df6a52f8090ab707bea3162b19cc0dd93773edca Mon Sep 17 00:00:00 2001 From: Natoandro Date: Fri, 14 Jun 2024 20:00:46 +0300 Subject: [PATCH 27/35] display debugging info --- libs/metagen/src/tests/fixtures.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/libs/metagen/src/tests/fixtures.rs b/libs/metagen/src/tests/fixtures.rs index 6145cafaad..64f1bfeef7 100644 --- a/libs/metagen/src/tests/fixtures.rs +++ b/libs/metagen/src/tests/fixtures.rs @@ -7,7 +7,7 @@ use common::typegraph::*; pub async fn test_typegraph_1() -> anyhow::Result> { let out = tokio::process::Command::new("cargo") .args( - "run -p meta-cli -- serialize -f tests/tg.ts" + "run -p meta-cli -- serialize -f 
tests/tg.ts -vvv" // "run -p meta-cli -- serialize -f ../../examples/typegraphs/reduce.py" .split(' ') .collect::>(), @@ -15,8 +15,12 @@ pub async fn test_typegraph_1() -> anyhow::Result> { .kill_on_drop(true) .output() .await?; - let mut tg: Vec> = serde_json::from_slice(&out.stdout) - .with_context(|| format!("error deserializing typegraph: {out:?}"))?; + let mut tg: Vec> = serde_json::from_slice(&out.stdout).with_context(|| { + format!( + "error deserializing typegraph: {out:?}\nstderr):\n{}\n---END---", + std::str::from_utf8(&out.stderr).unwrap(), + ) + })?; Ok(tg.pop().unwrap()) } From 9abbe0c6abbd56b21f1b8b9ffefcb3ccedbf5343 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Sat, 15 Jun 2024 02:10:38 +0300 Subject: [PATCH 28/35] upload tokens --- typegate/src/services/artifact_service.ts | 53 +++-- typegate/src/typegate/artifacts/local.ts | 32 ++- typegate/src/typegate/artifacts/mod.ts | 50 ++--- typegate/src/typegate/artifacts/shared.ts | 34 ++- typegate/tests/artifacts/artifacts_test.ts | 205 ++++++++++-------- typegraph/node/sdk/src/tg_artifact_upload.ts | 35 ++- .../typegraph/graph/tg_artifact_upload.py | 34 ++- 7 files changed, 209 insertions(+), 234 deletions(-) diff --git a/typegate/src/services/artifact_service.ts b/typegate/src/services/artifact_service.ts index fff1133ef6..916d93afe7 100644 --- a/typegate/src/services/artifact_service.ts +++ b/typegate/src/services/artifact_service.ts @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Elastic-2.0 import { + ArtifactMeta, artifactMetaSchema, ArtifactStore, } from "../typegate/artifacts/mod.ts"; @@ -11,7 +12,7 @@ import { BaseError, UnknownError } from "../errors.ts"; const logger = getLogger(import.meta); -const getUploadUrlBodySchema = z.array(artifactMetaSchema); +const prepareUploadBodySchema = z.array(artifactMetaSchema); export class ArtifactService { constructor(private store: ArtifactStore) {} @@ -21,7 +22,7 @@ export class ArtifactService { // [1] is the typegraph name; [2] is the service name const operation = url.pathname.split("/")[3]; - if (operation === "upload-urls") { + if (operation === "prepare-upload") { if (request.method !== "POST") { logger.warn("Method not allowed: {}", request.method); return new Response(JSON.stringify({ error: "method not allowed" }), { @@ -30,9 +31,9 @@ export class ArtifactService { }); } - let metaList; + let metaList: Array; try { - metaList = getUploadUrlBodySchema.parse(await request.json()); + metaList = prepareUploadBodySchema.parse(await request.json()); } catch (error) { logger.error("Failed to parse data: {}", error); return new Response( @@ -45,11 +46,7 @@ export class ArtifactService { } try { - const data = await this.#createUploadUrls( - metaList, - tgName, - new URL(request.url).origin, - ); + const data = await this.#createUploadTokens(metaList, tgName); return new Response(JSON.stringify(data), { headers: { "Content-Type": "application/json" }, }); @@ -77,30 +74,38 @@ export class ArtifactService { }); } - return await this.#handleUpload(url, request.body!, tgName); + const token = url.searchParams.get("token"); + + if (!token) { + logger.warn("Missing upload token"); + return new Response(JSON.stringify({ error: "missing token" }), { + status: 403, + headers: { "Content-Type": "application/json" }, + }); + } + + return await this.#handleUpload(token, request.body!, tgName); } - #createUploadUrls( - items: Array>, - tgName: string, - origin: string, - ) { - return Promise.all(items.map(async (meta) => { - if (meta.typegraphName !== tgName) { - throw new Error("Typegraph name 
mismatch"); - } - return await this.store.prepareUpload(meta, new URL(origin)); - })); + #createUploadTokens(items: Array, tgName: string) { + return Promise.all( + items.map(async (meta) => { + if (meta.typegraphName !== tgName) { + throw new Error("Typegraph name mismatch"); + } + return await this.store.prepareUpload(meta); + }), + ); } async #handleUpload( - url: URL, + token: string, stream: ReadableStream, tgName: string, ) { - let meta; + let meta: ArtifactMeta; try { - meta = await this.store.takeUploadUrl(url); + meta = await this.store.takeArtifactMeta(token); } catch (e) { if (e instanceof BaseError) { return e.toResponse(); diff --git a/typegate/src/typegate/artifacts/local.ts b/typegate/src/typegate/artifacts/local.ts index 47cdbe999d..ebffae9aa7 100644 --- a/typegate/src/typegate/artifacts/local.ts +++ b/typegate/src/typegate/artifacts/local.ts @@ -21,9 +21,13 @@ import { BaseError, ErrorKind } from "@typegate/errors.ts"; const logger = getLogger(import.meta); -class UnknownUploadUrl extends BaseError { - constructor(url: URL) { - super(import.meta, ErrorKind.User, `Unknown upload URL: ${url.toString()}`); +class InvalidUploadToken extends BaseError { + constructor(token: string) { + super( + import.meta, + ErrorKind.User, + `Unknown upload token: ${token.toString()}`, + ); } } @@ -126,32 +130,22 @@ class LocalUploadEndpointManager implements UploadEndpointManager { await Promise.resolve(void null); } - async prepareUpload( - meta: ArtifactMeta, - origin: URL, - persistence: ArtifactPersistence, - ) { + async prepareUpload(meta: ArtifactMeta, persistence: ArtifactPersistence) { if (await persistence.has(meta.hash)) { return null; } - const url = await ArtifactStore.createUploadUrl( - origin, - meta.typegraphName, - this.expireSec, - ); - const token = url.searchParams.get("token")!; + const token = await ArtifactStore.createUploadToken(this.expireSec); this.#mapToMeta.set(token, meta); this.#expirationQueue.push([token, jwt.getNumericDate(this.expireSec)]); - return url.toString(); + return token; } - async takeUploadUrl(url: URL) { - const token = await ArtifactStore.validateUploadUrl(url); - + async takeArtifactMeta(token: string) { + await ArtifactStore.validateUploadToken(token); const meta = this.#mapToMeta.get(token); if (!meta) { - throw new UnknownUploadUrl(url); + throw new InvalidUploadToken(token); } this.#mapToMeta.delete(token); diff --git a/typegate/src/typegate/artifacts/mod.ts b/typegate/src/typegate/artifacts/mod.ts index b896b1ec6c..c3587e57b4 100644 --- a/typegate/src/typegate/artifacts/mod.ts +++ b/typegate/src/typegate/artifacts/mod.ts @@ -12,14 +12,9 @@ import { exists } from "std/fs/exists.ts"; import { AsyncDisposableStack } from "dispose"; import { BaseError, ErrorKind, NotImplemented } from "@typegate/errors.ts"; -class InvalidUploadUrl extends BaseError { - constructor(url: URL, kind: "unknown" | "expired" = "unknown") { - super( - import.meta, - ErrorKind.User, - `${kind} upload URL: ${url.toString()}`, - 403, - ); +class InvalidUploadToken extends BaseError { + constructor(token: string, kind: "unknown" | "expired" = "unknown") { + super(import.meta, ErrorKind.User, `${kind} upload token: ${token}`, 403); } } @@ -37,10 +32,6 @@ export interface Dirs { artifacts: string; } -function getUploadPath(tgName: string) { - return `/${tgName}/artifacts`; -} - async function getLocalParentDir( entrypoint: ArtifactMeta, deps: ArtifactMeta[], @@ -100,10 +91,9 @@ export interface ArtifactPersistence extends AsyncDisposable { export interface 
UploadEndpointManager extends AsyncDisposable { prepareUpload( meta: ArtifactMeta, - origin: URL, persistence: ArtifactPersistence, ): Promise; - takeUploadUrl(url: URL): Promise; + takeArtifactMeta(token: string): Promise; } export class ArtifactStore implements AsyncDisposable { @@ -201,17 +191,17 @@ export class ArtifactStore implements AsyncDisposable { return this.#resolveLocalPath(meta, parentDirName); } - prepareUpload(meta: ArtifactMeta, origin: URL) { - return this.uploadEndpoints.prepareUpload(meta, origin, this.persistence); + prepareUpload(meta: ArtifactMeta) { + return this.uploadEndpoints.prepareUpload(meta, this.persistence); } - takeUploadUrl(url: URL) { - return this.uploadEndpoints.takeUploadUrl(url); + takeArtifactMeta(token: string) { + return this.uploadEndpoints.takeArtifactMeta(token); } /** unique identifier for an artifact (file content) */ static getArtifactKey(meta: ArtifactMeta) { - // TODO what happens on cache collision? + // TODO what happens on hash collision? return meta.hash; } @@ -220,30 +210,16 @@ export class ArtifactStore implements AsyncDisposable { * @param origin The origin of the request. * @returns The URL to upload the artifact to and the expiration time. */ - static async createUploadUrl( - origin: URL, - tgName: string, - expireSec: number, - ): Promise { + static async createUploadToken(expireSec: number): Promise { const uuid = crypto.randomUUID(); - const token = await signJWT({ uuid, expiresIn: expireSec }, expireSec); - const url = new URL(getUploadPath(tgName), origin); - url.searchParams.set("token", token); - return url; + return await signJWT({ uuid, expiresIn: expireSec }, expireSec); } - static async validateUploadUrl(url: URL) { - const token = url.searchParams.get("token"); - if (/^\/([^\/])\/artifacts/.test(url.pathname) || !token) { - throw new InvalidUploadUrl(url); - } - + static async validateUploadToken(token: string) { const context = await verifyJWT(token); if ((context.exp as number) < jwt.getNumericDate(new Date())) { - throw new InvalidUploadUrl(url, "expired"); + throw new InvalidUploadToken(token, "expired"); } - - return token; } } diff --git a/typegate/src/typegate/artifacts/shared.ts b/typegate/src/typegate/artifacts/shared.ts index 3c7052348b..d78d92d011 100644 --- a/typegate/src/typegate/artifacts/shared.ts +++ b/typegate/src/typegate/artifacts/shared.ts @@ -25,14 +25,14 @@ export interface RemoteUploadUrlStore { redisClient: Redis; } -function getRedisUploadUrlKey(token: string) { +// TODO change to 'typegate:artifacts:metadata:' +function getRedisArtifactMetaKey(token: string) { return `typegate:artifacts:upload-urls:${token}`; } function serializeToRedisValue(value: T): string { return JSON.stringify(value); } - function deserializeToCustom(value: string): T { return JSON.parse(value) as T; } @@ -100,9 +100,9 @@ class SharedArtifactPersistence implements ArtifactPersistence { const tmpFile = await Deno.makeTempFile({ dir: this.dirs.temp }); const file = await Deno.open(tmpFile, { write: true, truncate: true }); - await stream.pipeThrough(new HashTransformStream(hasher)).pipeTo( - file.writable, - ); + await stream + .pipeThrough(new HashTransformStream(hasher)) + .pipeTo(file.writable); const hash = hasher.digest("hex"); const body = await Deno.readFile(tmpFile); @@ -176,7 +176,10 @@ class SharedUploadEndpointManager implements UploadEndpointManager { return new SharedUploadEndpointManager(redis, expireSec); } - private constructor(private redis: Redis, private expireSec: number) {} + private constructor( + 
private redis: Redis, + private expireSec: number, + ) {} async [Symbol.asyncDispose]() { await this.redis.quit(); @@ -184,7 +187,6 @@ class SharedUploadEndpointManager implements UploadEndpointManager { async prepareUpload( meta: ArtifactMeta, - origin: URL, persistence: ArtifactPersistence, ): Promise { // should not be uploaded again @@ -192,34 +194,28 @@ class SharedUploadEndpointManager implements UploadEndpointManager { return null; } - const url = await ArtifactStore.createUploadUrl( - origin, - meta.typegraphName, - this.expireSec, - ); - const token = url.searchParams.get("token")!; + const token = await ArtifactStore.createUploadToken(this.expireSec); const _ = await this.redis.eval( /* lua */ ` redis.call('SET', KEYS[1], ARGV[1]) redis.call('EXPIRE', KEYS[1], ARGV[2]) `, - [getRedisUploadUrlKey(token)], + [getRedisArtifactMetaKey(token)], [serializeToRedisValue(meta), this.expireSec], ); - return url.toString(); + return token; } - async takeUploadUrl(url: URL): Promise { - const token = await ArtifactStore.validateUploadUrl(url); - + async takeArtifactMeta(token: string): Promise { + await ArtifactStore.validateUploadToken(token); const meta = await this.redis.eval( /* lua */ ` local meta = redis.call('GET', KEYS[1]) redis.call('DEL', KEYS[1]) return meta `, - [getRedisUploadUrlKey(token)], + [getRedisArtifactMetaKey(token)], [], ); return Promise.resolve(deserializeToCustom(meta as string)); diff --git a/typegate/tests/artifacts/artifacts_test.ts b/typegate/tests/artifacts/artifacts_test.ts index 7b1cc079cf..ea61fe4680 100644 --- a/typegate/tests/artifacts/artifacts_test.ts +++ b/typegate/tests/artifacts/artifacts_test.ts @@ -45,7 +45,7 @@ async function cleanUp() { } const variants = [ - { nameSuffix: "" }, + { mode: "default" }, { syncConfig, async setup() { @@ -54,7 +54,7 @@ const variants = [ async teardown() { await cleanUp(); }, - nameSuffix: " (sync)", + mode: "sync", }, ] as const; @@ -73,103 +73,116 @@ async function hasArtifact(t: MetaTest, hash: string, sync: boolean) { } } -for (const { nameSuffix, ...options } of variants) { - Meta.test({ - name: "Upload protocol" + nameSuffix, - ...options, - }, async (t) => { - const e = await t.engine("runtimes/deno/deno.py"); - const artifacts = e.tg.tg.meta.artifacts; - - await t.should("have uploaded artifacts on deploy", async () => { - for (const [_, meta] of Object.entries(artifacts)) { - const typedMeta = meta as { hash: string }; - assert(await hasArtifact(t, typedMeta.hash, "syncConfig" in options)); - } - }); - - await t.undeploy(e.name); - - await t.should("have removed artifacts on undeploy", async () => { - for (const [_, meta] of Object.entries(artifacts)) { - const typedMeta = meta as { hash: string }; - assertFalse( - await hasArtifact(t, typedMeta.hash, "syncConfig" in options), - ); - } - }); - }); - - Meta.test({ - name: "Upload protocol: tg_deploy (NodeJs SDK)" + nameSuffix, - ...options, - }, async (_t) => { - // TODO - }); - - Meta.test({ - name: "Upload protocol: tg_deploy (Python SDK)" + nameSuffix, - ...options, - }, async (t) => { - const e = await t.engine( - "runtimes/deno/deno.py", - ); - const artifacts = e.tg.tg.meta.artifacts; - - await t.should("have uploaded artifacts on deploy", async () => { - for (const [_, meta] of Object.entries(artifacts)) { - const typedMeta = meta as { hash: string }; - assert(await hasArtifact(t, typedMeta.hash, "syncConfig" in options)); - } - }); - - await t.undeploy(e.name); - - await t.should("have removed artifacts on undeploy", async () => { - for (const [_, 
meta] of Object.entries(artifacts)) { - const typedMeta = meta as { hash: string }; - assertFalse( - await hasArtifact(t, typedMeta.hash, "syncConfig" in options), - ); - } - }); - }); - - Meta.test({ - name: "Artifact GC: shared artifacts" + nameSuffix, - ...options, - }, async (t) => { - const engine = await t.engine("runtimes/deno/deno.py"); - const artifacts = engine.tg.tg.meta.artifacts; - - const enginePartial = await t.engine("runtimes/deno/deno_partial.py"); - const sharedArtifacts = Object.keys(enginePartial.tg.tg.meta.artifacts) - .filter((art) => art in artifacts); - - await t.undeploy(engine.name); - - await t.should("have removed shared artifacts", async () => { - for (const [art, meta] of Object.entries(artifacts)) { - const typedMeta = meta as { hash: string }; - if (sharedArtifacts.includes(art)) { +for (const { mode, ...options } of variants) { + Meta.test( + { + name: `Upload protocol (${mode} mode)`, + ...options, + }, + async (t) => { + const e = await t.engine("runtimes/deno/deno.py"); + const artifacts = e.tg.tg.meta.artifacts; + + await t.should("have uploaded artifacts on deploy", async () => { + for (const [_, meta] of Object.entries(artifacts)) { + const typedMeta = meta as { hash: string }; + assert(await hasArtifact(t, typedMeta.hash, "syncConfig" in options)); + } + }); + + await t.undeploy(e.name); + + await t.should("have removed artifacts on undeploy", async () => { + for (const [_, meta] of Object.entries(artifacts)) { + const typedMeta = meta as { hash: string }; + assertFalse( + await hasArtifact(t, typedMeta.hash, "syncConfig" in options), + ); + } + }); + }, + ); + + Meta.test( + { + name: `Upload protocol: tg_deploy (NodeJs SDK) (${mode} mode)`, + ...options, + }, + async (_t) => { + // TODO + }, + ); + + Meta.test( + { + name: `Upload protocol: tg_deploy (Python SDK) (${mode} mode)`, + ...options, + }, + async (t) => { + const e = await t.engine("runtimes/deno/deno.py"); + const artifacts = e.tg.tg.meta.artifacts; + + await t.should("have uploaded artifacts on deploy", async () => { + for (const [_, meta] of Object.entries(artifacts)) { + const typedMeta = meta as { hash: string }; assert(await hasArtifact(t, typedMeta.hash, "syncConfig" in options)); - } else { + } + }); + + await t.undeploy(e.name); + + await t.should("have removed artifacts on undeploy", async () => { + for (const [_, meta] of Object.entries(artifacts)) { + const typedMeta = meta as { hash: string }; + assertFalse( + await hasArtifact(t, typedMeta.hash, "syncConfig" in options), + ); + } + }); + }, + ); + + Meta.test( + { + name: `Artifact GC: shared artifacts (${mode} mode)`, + ...options, + }, + async (t) => { + const engine = await t.engine("runtimes/deno/deno.py"); + const artifacts = engine.tg.tg.meta.artifacts; + + const enginePartial = await t.engine("runtimes/deno/deno_partial.py"); + const sharedArtifacts = Object.keys( + enginePartial.tg.tg.meta.artifacts, + ).filter((art) => art in artifacts); + + await t.undeploy(engine.name); + + await t.should("have removed shared artifacts", async () => { + for (const [art, meta] of Object.entries(artifacts)) { + const typedMeta = meta as { hash: string }; + if (sharedArtifacts.includes(art)) { + assert( + await hasArtifact(t, typedMeta.hash, "syncConfig" in options), + ); + } else { + assertFalse( + await hasArtifact(t, typedMeta.hash, "syncConfig" in options), + ); + } + } + }); + + await t.undeploy(enginePartial.name); + + await t.should("have removed all artifacts", async () => { + for (const [_, meta] of 
Object.entries(artifacts)) { + const typedMeta = meta as { hash: string }; assertFalse( await hasArtifact(t, typedMeta.hash, "syncConfig" in options), ); } - } - }); - - await t.undeploy(enginePartial.name); - - await t.should("have removed all artifacts", async () => { - for (const [_, meta] of Object.entries(artifacts)) { - const typedMeta = meta as { hash: string }; - assertFalse( - await hasArtifact(t, typedMeta.hash, "syncConfig" in options), - ); - } - }); - }); + }); + }, + ); } diff --git a/typegraph/node/sdk/src/tg_artifact_upload.ts b/typegraph/node/sdk/src/tg_artifact_upload.ts index 9752960d15..74ff07db1e 100644 --- a/typegraph/node/sdk/src/tg_artifact_upload.ts +++ b/typegraph/node/sdk/src/tg_artifact_upload.ts @@ -16,26 +16,21 @@ interface UploadArtifactMeta { } export class ArtifactUploader { - private getUploadUrl: URL; - constructor( - baseUrl: string, + private baseUrl: string, private refArtifacts: Artifact[], private tgName: string, private auth: BasicAuth | undefined, private headers: Headers, private tgPath: string, - ) { - const suffix = `${tgName}/artifacts/upload-urls`; - this.getUploadUrl = new URL(suffix, baseUrl); - } + ) {} - private async fetchUploadUrls( + private async getUploadTokens( artifactMetas: UploadArtifactMeta[], ): Promise> { const artifactsJson = JSON.stringify(artifactMetas); const uploadUrls: Array = await execRequest( - this.getUploadUrl, + new URL(`${this.tgName}/artifacts/prepare-upload`, this.baseUrl), { method: "POST", headers: this.headers, @@ -63,7 +58,7 @@ export class ArtifactUploader { } private async upload( - url: string | null, + token: string | null, meta: UploadArtifactMeta, ): Promise { const uploadHeaders = new Headers({ @@ -74,22 +69,20 @@ export class ArtifactUploader { uploadHeaders.append("Authorization", this.auth.asHeaderValue()); } - if (url == null) { + if (token == null) { log.info("skipping artifact upload:", meta.relativePath); return; } - const urlObj = new URL(this.getUploadUrl); - const altUrlObj = new URL(url); - urlObj.pathname = altUrlObj.pathname; - urlObj.search = altUrlObj.search; + const uploadUrl = new URL(`${this.tgName}/artifacts`, this.baseUrl); + uploadUrl.searchParams.set("token", token); const path = join(dirname(this.tgPath), meta.relativePath); // TODO: stream const content = await fsp.readFile(path); - log.debug("uploading artifact", meta.relativePath, urlObj.href); + log.debug("uploading artifact", meta.relativePath, uploadUrl.href); const res = await execRequest( - urlObj, + uploadUrl, { method: "POST", headers: uploadHeaders, @@ -140,11 +133,11 @@ export class ArtifactUploader { async uploadArtifacts(): Promise { const artifactMetas = this.getMetas(this.refArtifacts); - const uploadUrls = await this.fetchUploadUrls(artifactMetas); - log.debug("upload urls", uploadUrls); + const tokens = await this.getUploadTokens(artifactMetas); + log.debug("upload urls", tokens); const results = await Promise.allSettled( - uploadUrls.map(async (url, i) => { - return await this.upload(url, artifactMetas[i]); + tokens.map(async (token, i) => { + return await this.upload(token, artifactMetas[i]); }), ); diff --git a/typegraph/python/typegraph/graph/tg_artifact_upload.py b/typegraph/python/typegraph/graph/tg_artifact_upload.py index 03414130bf..a907b4b011 100644 --- a/typegraph/python/typegraph/graph/tg_artifact_upload.py +++ b/typegraph/python/typegraph/graph/tg_artifact_upload.py @@ -26,7 +26,6 @@ class UploadArtifactMeta: class ArtifactUploader: base_url: str artifacts: List[Artifact] - get_upload_url: str 
tg_name: str auth: Union[BasicAuth, None] headers: Dict[str, str] @@ -44,20 +43,21 @@ def __init__( self.base_url = base_url self.artifacts = artifacts self.tg_name = tg_name - sep = "/" if not base_url.endswith("/") else "" - self.get_upload_url = base_url + sep + tg_name + "/artifacts/upload-urls" self.auth = auth self.headers = headers self.tg_path = tg_path - def __fetch_upload_urls( + def __get_upload_tokens( self, artifact_metas: List[UploadArtifactMeta], ) -> List[str]: artifacts_objs = [vars(meta) for meta in artifact_metas] artifacts_json = json.dumps(artifacts_objs, indent=4).encode() + + sep = "/" if not self.base_url.endswith("/") else "" + url = self.base_url + sep + self.tg_name + "/artifacts/prepare-upload" req = request.Request( - url=self.get_upload_url, + url=url, method="POST", headers=self.headers, data=artifacts_json, @@ -77,7 +77,7 @@ def __fetch_upload_urls( def __upload( self, - url: str, + token: str, meta: UploadArtifactMeta, ) -> Result[Any, Err]: upload_headers = {"Content-Type": "application/octet-stream"} @@ -86,7 +86,7 @@ def __upload( upload_headers["Authorization"] = self.auth.as_header_value() Log.debug("upload headers", upload_headers) - if url is None: + if token is None: Log.info("skipping artifact upload:", meta.relativePath) return Ok(None) @@ -98,24 +98,22 @@ def __upload( with open(path, "rb") as file: content = file.read() - # TODO temporary - parsed_upload_url = Url.urlparse(url) - parsed_url = Url.urlparse(self.base_url) - parsed_url = parsed_url._replace( - path=parsed_upload_url.path, query=parsed_upload_url.query + base_url = Url.urlparse(self.base_url) + sep = "/" if not base_url.path.endswith("/") else "" + upload_url = base_url._replace( + path=base_url.path + sep + self.tg_name + "/artifacts", + query=Url.urlencode({"token": token}), ) + upload_url = Url.urlunparse(upload_url) - rebased_url = Url.urlunparse(parsed_url) - - Log.debug("uploading artifact", meta.relativePath, rebased_url) + Log.debug("uploading artifact", meta.relativePath, upload_url) upload_req = request.Request( - url=rebased_url, + url=upload_url, method="POST", data=content, headers=upload_headers, ) try: - Log.debug("uploading artifact", meta.relativePath, str(rebased_url)) response = request.urlopen(upload_req) except HTTPError as e: Log.error("failed to upload artifact", meta.relativePath, e) @@ -165,7 +163,7 @@ def upload_artifacts( ) -> Result[None, Err]: artifact_metas = self.get_metas(self.artifacts) - upload_urls = self.__fetch_upload_urls(artifact_metas) + upload_urls = self.__get_upload_tokens(artifact_metas) Log.debug("upload urls", upload_urls) results = [] From e8c9e863381e9ef933c2ea20ac57debb675901f8 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 10:34:13 +0300 Subject: [PATCH 29/35] fix e2e dev_test.ts --- meta-cli/src/deploy/actors/watcher.rs | 2 +- typegate/tests/e2e/cli/dev_test.ts | 2 +- typegraph/core/src/utils/archive.rs | 10 ++++++---- typegraph/core/src/utils/postprocess/prisma_rt.rs | 2 +- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/meta-cli/src/deploy/actors/watcher.rs b/meta-cli/src/deploy/actors/watcher.rs index abd1be4f42..18ec7c5926 100644 --- a/meta-cli/src/deploy/actors/watcher.rs +++ b/meta-cli/src/deploy/actors/watcher.rs @@ -181,7 +181,7 @@ impl Handler for WatcherActor { RetryManager::clear_counter(&path); self.task_manager.do_send(task_manager::message::AddTask { - task_ref: self.task_generator.generate(path.into(), 0), + task_ref: self.task_generator.generate(rel_path.into(), 0), reason: 
TaskReason::FileChanged, }); } diff --git a/typegate/tests/e2e/cli/dev_test.ts b/typegate/tests/e2e/cli/dev_test.ts index 0b12b073d1..f5008540a3 100644 --- a/typegate/tests/e2e/cli/dev_test.ts +++ b/typegate/tests/e2e/cli/dev_test.ts @@ -98,7 +98,7 @@ Meta.test( }); await metadev.fetchStderrLines((line) => { - console.log("line:", line); + // console.log("line:", line); return !$.stripAnsi(line).includes( "successfully deployed typegraph migration-failure-test", ); diff --git a/typegraph/core/src/utils/archive.rs b/typegraph/core/src/utils/archive.rs index cbd64edf8c..156773d771 100644 --- a/typegraph/core/src/utils/archive.rs +++ b/typegraph/core/src/utils/archive.rs @@ -8,7 +8,7 @@ use common::archive::{ use std::{collections::BTreeMap, path::Path}; pub trait ArchiveExt { - fn compress_and_encode(&self, path: &Path) -> Result; + fn compress_and_encode(&self, path: impl AsRef) -> Result; fn unpack_base64(&self, tarb64: &str, dest: &Path) -> Result<(), String>; } @@ -35,7 +35,9 @@ impl FsContext { } impl ArchiveExt for FsContext { - fn compress_and_encode(&self, path: &Path) -> Result { + fn compress_and_encode(&self, path: impl AsRef) -> Result { + let path = path.as_ref(); + crate::logger::debug!("compress_and_encode: {path:?}"); let ignore = { let tg_ignore_path = Path::new(".tgignore"); let mut ignore = self.load_tg_ignore(tg_ignore_path)?; @@ -48,8 +50,8 @@ impl ArchiveExt for FsContext { let entries = paths .iter() .map(|p| { - self.read_file(p) - .map(|content| (p.to_string_lossy().into(), content)) + let key = p.strip_prefix(path).unwrap().to_string_lossy().into(); + self.read_file(p).map(|content| (key, content)) }) .collect::, _>>()?; diff --git a/typegraph/core/src/utils/postprocess/prisma_rt.rs b/typegraph/core/src/utils/postprocess/prisma_rt.rs index 2b9cf38898..71fa50cd56 100644 --- a/typegraph/core/src/utils/postprocess/prisma_rt.rs +++ b/typegraph/core/src/utils/postprocess/prisma_rt.rs @@ -44,7 +44,7 @@ impl PrismaProcessor { migration_files: { if action.apply { match fs_ctx.exists(&path)? 
{ - true => Some(fs_ctx.compress_and_encode(&path)?), + true => Some(fs_ctx.compress_and_encode(rt_name)?), false => None, } } else { From eaf371953cc4e134ad9dc440079bafa0221ce973 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 10:42:13 +0300 Subject: [PATCH 30/35] update snapshot for typegraph_test --- .../__snapshots__/typegraph_test.ts.snap | 22 +++++-------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/typegate/tests/e2e/typegraph/__snapshots__/typegraph_test.ts.snap b/typegate/tests/e2e/typegraph/__snapshots__/typegraph_test.ts.snap index 5d4e293f3a..a7993322a2 100644 --- a/typegate/tests/e2e/typegraph/__snapshots__/typegraph_test.ts.snap +++ b/typegate/tests/e2e/typegraph/__snapshots__/typegraph_test.ts.snap @@ -398,8 +398,8 @@ snapshot[`typegraphs creation 2`] = ` "idempotent": true }, "data": { - "name": "sha256_ca4c84007bbe1e2253363df3d2da8605fc21e36c4c280cbb335b7daf5ce0dc73", - "fn": "(lambda x: (x['first'] + x['second']))" + "name": "sha256_5ac5ebaa386435a02b0a96c48af3bf5d26e4d9ce29e5c9b54660b88b6dc3eea6", + "fn": "lambda x: (x['first'] + x['second'])" } }, { @@ -684,13 +684,8 @@ snapshot[`typegraphs creation 3`] = ` "idempotent": true }, "data": { - "denoArtifact": { - "path": "scripts/three.ts", - "hash": "564fe4792102c50aac9801faeb3c6402c49b1f7c7cbb22dc6d54886e45cfa3b2", - "size": 307 - }, - "deps": [], - "depsMeta": [] + "entryPoint": "scripts/three.ts", + "deps": [] } }, { @@ -1443,13 +1438,8 @@ snapshot[`typegraphs creation 6`] = ` "idempotent": true }, "data": { - "denoArtifact": { - "path": "scripts/three.ts", - "hash": "564fe4792102c50aac9801faeb3c6402c49b1f7c7cbb22dc6d54886e45cfa3b2", - "size": 307 - }, - "deps": [], - "depsMeta": [] + "entryPoint": "scripts/three.ts", + "deps": [] } }, { From 66a21b135bda3174b8bde6ad4f525e30b175f6fd Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 12:27:08 +0300 Subject: [PATCH 31/35] (ci/debug) try deployment with `pnpm dlx tsx` --- libs/metagen/src/tests/fixtures.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/metagen/src/tests/fixtures.rs b/libs/metagen/src/tests/fixtures.rs index 64f1bfeef7..6e2c608439 100644 --- a/libs/metagen/src/tests/fixtures.rs +++ b/libs/metagen/src/tests/fixtures.rs @@ -12,6 +12,7 @@ pub async fn test_typegraph_1() -> anyhow::Result> { .split(' ') .collect::>(), ) + .env("MCLI_LOADER_CMD", "pnpm dlx tsx") .kill_on_drop(true) .output() .await?; From 778e3bc1700df3b33d82d4651cfea5545807b29e Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 12:55:59 +0300 Subject: [PATCH 32/35] update actor-model diagram --- meta-cli/src/deploy/actor-model.drawio.svg | 269 ++++++++++----------- meta-cli/src/deploy/actors/task_manager.rs | 4 +- meta-cli/src/deploy/actors/watcher.rs | 1 + 3 files changed, 138 insertions(+), 136 deletions(-) diff --git a/meta-cli/src/deploy/actor-model.drawio.svg b/meta-cli/src/deploy/actor-model.drawio.svg index 26ec8bcac1..519c721b07 100644 --- a/meta-cli/src/deploy/actor-model.drawio.svg +++ b/meta-cli/src/deploy/actor-model.drawio.svg @@ -1,11 +1,11 @@ - + - + -
[actor-model.drawio.svg hunk body omitted: the updated deploy actor-model diagram shows a "Watch-mode only (deploy command)" region with the Watcher, plus the Console, Discovery, TaskManager (messages: AddTask, Restart), Ctrl-C handler, Typegate, Filesystem, "Unpack migrations", Task (messages: Stop, TaskFinished, Results), the spawned JS/Python process, and a TaskIO actor wired to stdout/stdin.]
- - messages:... + + stdout/stdin + diff --git a/meta-cli/src/deploy/actors/task_manager.rs b/meta-cli/src/deploy/actors/task_manager.rs index 8f2ef722d5..ec3e9d376f 100644 --- a/meta-cli/src/deploy/actors/task_manager.rs +++ b/meta-cli/src/deploy/actors/task_manager.rs @@ -266,7 +266,8 @@ impl Actor for TaskManager { match &self.stop_reason { Some(reason) => { if matches!(reason, StopReason::Restart) { - self.init_params + self.watcher_addr = self + .init_params .start_source(ctx.address(), self.task_generator.clone()); Running::Continue } else { @@ -433,6 +434,7 @@ impl Handler for TaskManager { fn handle(&mut self, _msg: Stop, ctx: &mut Context) -> Self::Result { if let Some(watcher) = &self.watcher_addr { + // This might be unnecessary, it will be stopped when the address is dropped. watcher.do_send(super::watcher::message::Stop); } match self.stop_reason.clone() { diff --git a/meta-cli/src/deploy/actors/watcher.rs b/meta-cli/src/deploy/actors/watcher.rs index 18ec7c5926..d579e0b481 100644 --- a/meta-cli/src/deploy/actors/watcher.rs +++ b/meta-cli/src/deploy/actors/watcher.rs @@ -20,6 +20,7 @@ use std::{sync::Arc, time::Duration}; pub mod message { use super::*; + // TODO remove #[derive(Message)] #[rtype(result = "()")] pub struct Stop; From d2d4d751e464e455484f25a0ce65a8e6a3d4317b Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 15:20:12 +0300 Subject: [PATCH 33/35] remove com module --- meta-cli/src/com/mod.rs | 5 - meta-cli/src/com/responses.rs | 31 ------ meta-cli/src/com/store.rs | 194 ---------------------------------- meta-cli/src/main.rs | 1 - 4 files changed, 231 deletions(-) delete mode 100644 meta-cli/src/com/mod.rs delete mode 100644 meta-cli/src/com/responses.rs delete mode 100644 meta-cli/src/com/store.rs diff --git a/meta-cli/src/com/mod.rs b/meta-cli/src/com/mod.rs deleted file mode 100644 index d0fd96a3c8..0000000000 --- a/meta-cli/src/com/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 - -pub mod responses; -pub mod store; diff --git a/meta-cli/src/com/responses.rs b/meta-cli/src/com/responses.rs deleted file mode 100644 index e3dfea0ed7..0000000000 --- a/meta-cli/src/com/responses.rs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. -// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - -use super::store::Command; -use serde_json::Value; - -// CLI => SDK - -#[derive(Serialize)] -pub struct CLIResponseSuccess { - pub data: Value, -} - -#[derive(Serialize)] -pub struct CLIResponseError { - pub error: String, -} - -// SDK => CLI - -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct SDKResponse { - pub command: Command, - pub typegraph_name: String, - pub typegraph_path: PathBuf, - /// Payload from the SDK (serialized typegraph, response from typegate) - pub data: Option, - pub error: Option, -} diff --git a/meta-cli/src/com/store.rs b/meta-cli/src/com/store.rs deleted file mode 100644 index 04d1e1b351..0000000000 --- a/meta-cli/src/com/store.rs +++ /dev/null @@ -1,194 +0,0 @@ -// Copyright Metatype OÜ, licensed under the Mozilla Public License Version 2.0. 
-// SPDX-License-Identifier: MPL-2.0 -use crate::interlude::*; - -use crate::{config::Config, secrets::Secrets}; -use common::node::BasicAuth; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use std::borrow::{Borrow, BorrowMut}; -use std::sync::Mutex; - -use super::responses::SDKResponse; - -lazy_static! { - #[derive(Debug)] - pub static ref STORE: Mutex = Mutex::new(Default::default()); -} - -fn with_store T>(f: F) -> T { - let guard = STORE.lock().unwrap(); - f(guard.borrow()) -} - -fn with_store_mut T>(f: F) -> T { - let mut guard = STORE.lock().unwrap(); - f(guard.borrow_mut()) -} - -#[allow(dead_code)] -#[derive(Serialize, Clone, Debug, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum Command { - Deploy, - Serialize, -} - -#[derive(Default, Clone, Debug)] -pub struct Endpoint { - pub typegate: String, - pub auth: Option, -} - -#[derive(Default, Serialize, Clone, Debug)] -pub struct MigrationAction { - pub reset: bool, - pub create: bool, -} - -#[derive(Default, Serialize, Clone, Debug)] -pub struct RuntimeMigrationAction { - pub runtime_name: String, - pub action: MigrationAction, -} - -#[derive(Default, Debug)] -pub struct ServerStore { - config: Option, - command: Option, - /// default (all) - migration_action_glob: MigrationAction, - /// 1 typegraph => n runtimes - migration_action: HashMap>>, - secrets: Secrets, - endpoint: Endpoint, - prefix: Option, - sdk_responses: HashMap>>, - artifact_resolution: Option, - codegen: Option, -} - -#[allow(dead_code)] -impl ServerStore { - pub fn with(command: Option, config: Option) { - with_store_mut(|s| { - s.config = config; - s.command = command; - }) - } - - pub fn set_config(config: Config) { - with_store_mut(|s| s.config = Some(config)) - } - - pub fn get_config() -> Option { - with_store(|s| s.config.to_owned()) - } - - pub fn get_command() -> Option { - with_store(|s| s.command.clone()) - } - - pub fn set_secrets(secrets: Secrets) { - with_store_mut(|s| s.secrets = secrets) - } - - pub fn get_secrets(tg_name: &str) -> HashMap { - with_store(|s| s.secrets.get(tg_name)) - } - - pub fn set_endpoint(endpoint: Endpoint) { - with_store_mut(|s| s.endpoint = endpoint) - } - - pub fn get_endpoint() -> Endpoint { - with_store(|s| s.endpoint.clone()) - } - - pub fn add_response(response: SDKResponse) { - with_store_mut(|s| { - let mut name_to_res = s - .sdk_responses - .get(&response.typegraph_path) - .map(|v| v.as_ref().to_owned()) - .unwrap_or_default(); - - name_to_res.insert(response.typegraph_name.clone(), response.clone()); - - s.sdk_responses - .insert(response.typegraph_path.clone(), name_to_res.into()); - }) - } - - pub fn get_responses(tg_path: &PathBuf) -> Option>> { - with_store(|s| s.sdk_responses.get(tg_path).map(|v| v.to_owned())) - } - - pub fn get_responses_or_fail(tg_path: &PathBuf) -> Result>> { - match Self::get_responses(tg_path) { - Some(res) => Ok(res.to_owned()), - None => bail!("invalid state, no response was sent by {tg_path:?}, this could be the result of an outdated sdk"), - } - } - - pub fn set_migration_action_glob(option: MigrationAction) { - with_store_mut(|s| s.migration_action_glob = option) - } - - pub fn get_migration_action_glob() -> MigrationAction { - with_store(|s| s.migration_action_glob.to_owned()) - } - - pub fn set_migration_action(tg_path: PathBuf, rt_migration: RuntimeMigrationAction) { - with_store_mut(|s| { - let mut items = vec![]; - if let Some(actions) = s.migration_action.get(&tg_path) { - items.clone_from(actions.as_ref()); - } - // remove previous rt action if 
any - items.retain(|v| v.runtime_name.ne(&rt_migration.runtime_name)); - items.push(rt_migration); - s.migration_action.insert(tg_path, items.into()); - }) - } - - pub fn get_per_runtime_migration_action( - tg_path: &PathBuf, - ) -> Option> { - with_store(|s| { - if let Some(mig_action) = s.migration_action.get(tg_path) { - println!( - "specific migration action was defined for {}", - tg_path.display() - ); - return Some(mig_action.as_ref().to_owned()); - } - None - }) - } - - pub fn set_prefix(prefix: Option) { - with_store_mut(|s| s.prefix = prefix) - } - - pub fn get_prefix() -> Option { - with_store(|s| s.prefix.to_owned()) - } - - pub fn set_artifact_resolution_flag(value: bool) { - with_store_mut(|s| s.artifact_resolution = Some(value)) - } - - /// true by default - pub fn get_artifact_resolution_flag() -> bool { - with_store(|s| s.artifact_resolution.unwrap_or(true)) - } - - pub fn set_codegen_flag(value: bool) { - with_store_mut(|s| s.codegen = Some(value)) - } - - pub fn get_codegen_flag() -> bool { - with_store(|s| s.codegen.unwrap_or(false)) - } -} diff --git a/meta-cli/src/main.rs b/meta-cli/src/main.rs index 502e4eebf3..8f0b3c337e 100644 --- a/meta-cli/src/main.rs +++ b/meta-cli/src/main.rs @@ -33,7 +33,6 @@ mod interlude { } mod cli; -mod com; mod config; pub mod deploy; mod fs; From 671ab4f440dcaae469724b6a793619249b2b8305 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 16:15:12 +0300 Subject: [PATCH 34/35] process wait timeout --- meta-cli/src/deploy/actors/task.rs | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/meta-cli/src/deploy/actors/task.rs b/meta-cli/src/deploy/actors/task.rs index 15acda8376..c59dfae9c0 100644 --- a/meta-cli/src/deploy/actors/task.rs +++ b/meta-cli/src/deploy/actors/task.rs @@ -74,7 +74,6 @@ enum TaskOutput { Deployed { deployed: String }, } -// TODO cli param const TIMEOUT_ENV_NAME: &str = "LOADER_TIMEOUT_SECS"; const DEFAULT_TIMEOUT: u64 = 120; @@ -117,7 +116,6 @@ where action_generator, action: initial_action, results: Default::default(), - // TODO doc? timeout_duration: Duration::from_secs( std::env::var(TIMEOUT_ENV_NAME) .map(|s| { @@ -266,9 +264,13 @@ impl Handler> for TaskActor { let action = self.action.clone(); let fut = async move { - // TODO timeout? 
- match Box::into_pin(process.wait_with_output()).await { - Ok(output) => { + match tokio::time::timeout( + Duration::from_secs(5), + Box::into_pin(process.wait_with_output()), + ) + .await + { + Ok(Ok(output)) => { if output.status.success() { if let Some(followup_options) = followup_options { addr.do_send(RestartProcessWithOptions(followup_options)) @@ -290,12 +292,21 @@ impl Handler> for TaskActor { addr.do_send(Exit(TaskFinishStatus::::Error)); } } - Err(e) => { + Ok(Err(e)) => { console.error( action.get_error_message(&format!("could not read process status: {e:#}")), ); addr.do_send(Exit(TaskFinishStatus::::Error)); } + Err(e) => { + // timeout + console.error( + action.get_error_message(&format!( + "timeout waiting the process to exit: {e:#}" + )), + ); + addr.do_send(Exit(TaskFinishStatus::::Error)); + } } }; From 8c666c83c1de61d37a2b2f2793784fec7b1981f0 Mon Sep 17 00:00:00 2001 From: Natoandro Date: Mon, 17 Jun 2024 16:16:19 +0300 Subject: [PATCH 35/35] update CLI docs --- website/docs/reference/meta-cli/available-commands.mdx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/docs/reference/meta-cli/available-commands.mdx b/website/docs/reference/meta-cli/available-commands.mdx index 34fc78b482..ba737455d0 100644 --- a/website/docs/reference/meta-cli/available-commands.mdx +++ b/website/docs/reference/meta-cli/available-commands.mdx @@ -34,6 +34,6 @@ meta gen mod -h ## Environment variables -| Name | Description | Default value | -| -------------- | ----------------------------- | ------------- | -| LOADER_TIMEOUT | Set deploy timeout in seconds | 120 | +| Name | Description | Default value | +| ------------------- | ----------------------------- | ------------- | +| LOADER_TIMEOUT_SECS | Set deploy timeout in seconds | 120 |
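
For reference, a minimal usage sketch of the documented variable follows. Only the `LOADER_TIMEOUT_SECS` name and its 120-second default come from the patch above (`TIMEOUT_ENV_NAME` and `DEFAULT_TIMEOUT` in `meta-cli/src/deploy/actors/task.rs`); the `meta deploy` invocation and the 300-second value are illustrative placeholders.

```bash
# Raise the deploy timeout for a single run; the CLI falls back to the
# 120-second default when LOADER_TIMEOUT_SECS is not set.
# The subcommand shown here is a placeholder for whatever command you run.
LOADER_TIMEOUT_SECS=300 meta deploy
```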